diff --git a/README.md b/README.md new file mode 100644 index 000000000..2c245a214 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +The Genome Analysis Toolkit +============ +See http://www.broadinstitute.org/gatk/ diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java index a3a9e50e9..9ba468191 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/BaseQualityRankSumTest.java @@ -47,13 +47,11 @@ package org.broadinstitute.sting.gatk.walkers.annotator; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -67,41 +65,19 @@ import java.util.*; *

The base quality rank sum test cannot be calculated for sites without a mixture of reads showing both the reference and alternate alleles.

*/ public class BaseQualityRankSumTest extends RankSumTest implements StandardAnnotation { + @Override public List getKeyNames() { return Arrays.asList("BaseQRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("BaseQRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref base qualities")); } - protected void fillQualsFromPileup(final List allAlleles, final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, final List altQuals){ - - if (alleleLikelihoodMap == null) { - // use fast SNP-based version if we don't have per-read allele likelihoods - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) ) { - if ( allAlleles.get(0).equals(Allele.create(p.getBase(),true)) ) { - refQuals.add((double)p.getQual()); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)p.getQual()); - } - } - } - return; - } - - for (Map el : alleleLikelihoodMap.getLikelihoodMapValues()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el); - if (! 
a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add(-10.0*(double)el.get(a.getMostLikelyAllele())); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add(-10.0*(double)el.get(a.getMostLikelyAllele())); - - - } + @Override + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)read.getBaseQualities()[ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, refLoc, ReadUtils.ClippingTail.RIGHT_TAIL)]; } - + @Override + protected Double getElementForPileupElement(final PileupElement p) { + return (double)p.getQual(); + } } \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java index 366512119..eaa9df128 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ClippingRankSumTest.java @@ -46,14 +46,11 @@ package org.broadinstitute.sting.gatk.walkers.annotator; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -69,31 +66,14 @@ import java.util.*; * @since 6/28/12 */ public class ClippingRankSumTest extends RankSumTest { - + @Override public List getKeyNames() { return Arrays.asList("ClippingRankSum"); } + @Override public 
List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ClippingRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref number of hard clipped bases")); } - - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap likelihoodMap, final List refQuals, final List altQuals) { - // todo - only support non-pileup case for now, e.g. active-region based version - if (pileup != null || likelihoodMap == null) - return; - - for (Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet()) { - - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)AlignmentUtils.getNumHardClippedBases(el.getKey())); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)AlignmentUtils.getNumHardClippedBases(el.getKey())); - - } + @Override + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)AlignmentUtils.getNumHardClippedBases(read); } - } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java index 1cf91f181..b22ea7931 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerAlleleBySample.java @@ -66,10 +66,7 @@ import org.broadinstitute.variant.variantcontext.Genotype; import org.broadinstitute.variant.variantcontext.GenotypeBuilder; import org.broadinstitute.variant.variantcontext.VariantContext; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; /** @@ -135,20 
+132,24 @@ public class DepthPerAlleleBySample extends GenotypeAnnotation implements Standa } private void annotateWithLikelihoods(final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap, final VariantContext vc, final GenotypeBuilder gb) { - final HashMap alleleCounts = new HashMap(); + final Set alleles = new HashSet<>(vc.getAlleles()); + + // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext + if ( ! perReadAlleleLikelihoodMap.getAllelesSet().containsAll(alleles) ) + throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + perReadAlleleLikelihoodMap.getAllelesSet()); + + final HashMap alleleCounts = new HashMap<>(); + for ( final Allele allele : vc.getAlleles() ) { alleleCounts.put(allele, 0); } - for ( final Allele allele : vc.getAlleles() ) { - alleleCounts.put(allele, 0); - } for (Map.Entry> el : perReadAlleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles); + if (! a.isInformative() ) continue; // read is non-informative final GATKSAMRecord read = el.getKey(); - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative() ) - continue; // read is non-informative - if (!vc.getAlleles().contains(a.getMostLikelyAllele())) - continue; // sanity check - shouldn't be needed - alleleCounts.put(a.getMostLikelyAllele(), alleleCounts.get(a.getMostLikelyAllele()) + (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1)); + final int prevCount = alleleCounts.get(a.getMostLikelyAllele()); + final int incCount = read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + alleleCounts.put(a.getMostLikelyAllele(), prevCount + incCount); } + final int[] counts = new int[alleleCounts.size()]; counts[0] = alleleCounts.get(vc.getReference()); for (int i = 0; i < vc.getAlternateAlleles().size(); i++) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java new file mode 100644 index 000000000..9bd641011 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/DepthPerSampleHC.java @@ -0,0 +1,126 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotatorCompatible; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.GenotypeAnnotation; +import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.ReadUtils; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.Genotype; +import org.broadinstitute.variant.variantcontext.GenotypeBuilder; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.vcf.VCFConstants; +import org.broadinstitute.variant.vcf.VCFFormatHeaderLine; +import org.broadinstitute.variant.vcf.VCFStandardHeaderLines; + +import java.util.*; + + +/** + * The depth of coverage of each allele per sample + * + * the depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot + * differentiate between reads that align over the event but aren't informative vs. those that aren't even + * close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). 
+ */ +public class DepthPerSampleHC extends GenotypeAnnotation { + public void annotate(final RefMetaDataTracker tracker, + final AnnotatorCompatible walker, + final ReferenceContext ref, + final AlignmentContext stratifiedContext, + final VariantContext vc, + final Genotype g, + final GenotypeBuilder gb, + final PerReadAlleleLikelihoodMap alleleLikelihoodMap) { + if ( g == null || !g.isCalled() || ( stratifiedContext == null && alleleLikelihoodMap == null) ) + return; + + if (alleleLikelihoodMap == null ) + throw new IllegalStateException("DepthPerSampleHC can only be used with likelihood based annotations in the HaplotypeCaller"); + + // the depth for the HC is the sum of the informative alleles at this site. It's not perfect (as we cannot + // differentiate between reads that align over the event but aren't informative vs. those that aren't even + // close) but it's a pretty good proxy and it matches with the AD field (i.e., sum(AD) = DP). + int dp = 0; + + if ( alleleLikelihoodMap.isEmpty() ) { + // there are no reads + } else { + final Set alleles = new HashSet<>(vc.getAlleles()); + + // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext + if ( ! alleleLikelihoodMap.getAllelesSet().containsAll(alleles) ) + throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + alleleLikelihoodMap.getAllelesSet()); + + for (Map.Entry> el : alleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles); + if ( a.isInformative() ) { + final GATKSAMRecord read = el.getKey(); + final int incCount = read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1; + dp += incCount; + } + } + + gb.DP(dp); + } + } + + public List getKeyNames() { + return Collections.singletonList(VCFConstants.DEPTH_KEY); + } + + public List getDescriptions() { + return Collections.singletonList(VCFStandardHeaderLines.getFormatLine(getKeyNames().get(0))); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java index 957eb1aba..876dbf039 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/FisherStrand.java @@ -300,7 +300,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat for ( Map.Entry sample : stratifiedContexts.entrySet() ) { for (PileupElement p : sample.getValue().getBasePileup()) { - if ( ! RankSumTest.isUsableBase(p, false) ) // ignore deletions + if ( ! isUsableBase(p) ) // ignore deletions and bad MQ continue; if ( p.getQual() < minQScoreToConsider || p.getMappingQual() < minQScoreToConsider ) @@ -313,6 +313,20 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat return table; } + /** + * Can the base in this pileup element be used in comparative tests? 
+ * + * @param p the pileup element to consider + * + * @return true if this base is part of a meaningful read for comparison, false otherwise + */ + private static boolean isUsableBase(final PileupElement p) { + return !( p.isDeletion() || + p.getMappingQual() == 0 || + p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); + } + private static void updateTable(final int[][] table, final Allele allele, final GATKSAMRecord read, final Allele ref, final Allele alt, final int representativeCount) { final boolean matchesRef = allele.equals(ref, true); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java new file mode 100644 index 000000000..c7fff5a7f --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/LikelihoodRankSumTest.java @@ -0,0 +1,79 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. 
DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. 
LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. 
BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.variant.vcf.VCFHeaderLineType; +import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; + +import java.util.Arrays; +import java.util.List; + +/** + * U-based z-approximation from the Mann-Whitney Rank Sum Test contrasting the likelihoods of reads to their + * most likely haplotypes. This is effectively testing for a differentiate quality in the modeling of the alt + * allele than the reference allele. + */ +public class LikelihoodRankSumTest extends RankSumTest { + @Override + public List getKeyNames() { return Arrays.asList("LikelihoodRankSum"); } + + @Override + public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("LikelihoodRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt Vs. Ref haplotype likelihoods")); } + + @Override + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc, final MostLikelyAllele mostLikelyAllele) { + if ( ! 
mostLikelyAllele.isInformative() ) throw new IllegalStateException("Should never have seen non-informative read " + read + " MostLikelyAllele " + mostLikelyAllele); + return mostLikelyAllele.getLog10LikelihoodOfMostLikely(); + } + + @Override + protected Double getElementForRead(GATKSAMRecord read, int refLoc) { + throw new IllegalStateException("This method should never have been called as getElementForRead(read,refloc,mostLikelyAllele) was overloaded"); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java index 3873138a2..b2a504eb2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/MappingQualityRankSumTest.java @@ -47,14 +47,10 @@ package org.broadinstitute.sting.gatk.walkers.annotator; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -68,40 +64,19 @@ import java.util.*; *

The mapping quality rank sum test cannot be calculated for sites without a mixture of reads showing both the reference and alternate alleles.

*/ public class MappingQualityRankSumTest extends RankSumTest implements StandardAnnotation { - + @Override public List getKeyNames() { return Arrays.asList("MQRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("MQRankSum", 1, VCFHeaderLineType.Float, "Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities")); } - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap likelihoodMap, - final List refQuals, final List altQuals) { - - if (pileup != null && likelihoodMap == null) { - // old UG snp-only path through the annotations - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) ) { - if ( allAlleles.get(0).equals(Allele.create(p.getBase(), true)) ) { - refQuals.add((double)p.getMappingQual()); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)p.getMappingQual()); - } - } - } - return; - } - for (Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - // BUGBUG: There needs to be a comparable isUsableBase check here - if (! 
a.isInformative()) - continue; // read is non-informative - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)el.getKey().getMappingQuality()); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)el.getKey().getMappingQuality()); - } + @Override + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + return (double)read.getMappingQuality(); } - } \ No newline at end of file + @Override + protected Double getElementForPileupElement(final PileupElement p) { + return (double)p.getRead().getMappingQuality(); + } +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java index 18348162e..d9bc5966c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RMSMappingQuality.java @@ -56,6 +56,7 @@ import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnota import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.vcf.VCFConstants; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.variant.vcf.VCFStandardHeaderLines; @@ -77,55 +78,41 @@ public class RMSMappingQuality extends InfoFieldAnnotation implements StandardAn final Map stratifiedContexts, final VariantContext vc, final Map perReadAlleleLikelihoodMap ) { - int totalSize = 0, index = 0; - int qualities[]; - if (stratifiedContexts != null) { + + final List qualities = new ArrayList<>(); + if ( stratifiedContexts != null ) { if ( stratifiedContexts.size() == 0 ) return null; - for ( AlignmentContext context : 
stratifiedContexts.values() ) - totalSize += context.size(); - - qualities = new int[totalSize]; - - for ( Map.Entry sample : stratifiedContexts.entrySet() ) { - AlignmentContext context = sample.getValue(); - for (PileupElement p : context.getBasePileup() ) - index = fillMappingQualitiesFromPileupAndUpdateIndex(p.getRead(), index, qualities); + for ( final Map.Entry sample : stratifiedContexts.entrySet() ) { + final AlignmentContext context = sample.getValue(); + for ( final PileupElement p : context.getBasePileup() ) + fillMappingQualitiesFromPileup(p.getRead().getMappingQuality(), p.getRepresentativeCount(), qualities); } } else if (perReadAlleleLikelihoodMap != null) { if ( perReadAlleleLikelihoodMap.size() == 0 ) return null; - for ( PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) - totalSize += perReadLikelihoods.size(); - - qualities = new int[totalSize]; - for ( PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) { - for (GATKSAMRecord read : perReadLikelihoods.getStoredElements()) - index = fillMappingQualitiesFromPileupAndUpdateIndex(read, index, qualities); - - - } + for ( final PerReadAlleleLikelihoodMap perReadLikelihoods : perReadAlleleLikelihoodMap.values() ) { + for ( final GATKSAMRecord read : perReadLikelihoods.getStoredElements() ) + fillMappingQualitiesFromPileup(read.getMappingQuality(), (read.isReducedRead() ? 
read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1), qualities); + } } else return null; - - - double rms = MathUtils.rms(qualities); - Map map = new HashMap(); - map.put(getKeyNames().get(0), String.format("%.2f", rms)); - return map; + final double rms = MathUtils.rms(qualities); + return Collections.singletonMap(getKeyNames().get(0), (Object)String.format("%.2f", rms)); } - private static int fillMappingQualitiesFromPileupAndUpdateIndex(final GATKSAMRecord read, final int inputIdx, final int[] qualities) { - int outputIdx = inputIdx; - if ( read.getMappingQuality() != QualityUtils.MAPPING_QUALITY_UNAVAILABLE ) - qualities[outputIdx++] = read.getMappingQuality(); - - return outputIdx; + private static void fillMappingQualitiesFromPileup(final int mq, final int representativeCount, final List qualities) { + if ( mq != QualityUtils.MAPPING_QUALITY_UNAVAILABLE ) { + if ( representativeCount == 1 ) + qualities.add(mq); + else + qualities.addAll(Collections.nCopies(representativeCount, mq)); + } } public List getKeyNames() { return Arrays.asList(VCFConstants.RMS_MAPPING_QUALITY_KEY); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java index ef456824e..1ba13afa1 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/RankSumTest.java @@ -53,9 +53,11 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.ActiveRegionBasedAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotatorCompatible; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation; +import 
org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.MannWhitneyU; import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.pileup.PileupElement; @@ -87,31 +89,33 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR if (genotypes == null || genotypes.size() == 0) return null; - final ArrayList refQuals = new ArrayList(); - final ArrayList altQuals = new ArrayList(); + final ArrayList refQuals = new ArrayList<>(); + final ArrayList altQuals = new ArrayList<>(); for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) { - PerReadAlleleLikelihoodMap indelLikelihoodMap = null; - ReadBackedPileup pileup = null; + boolean usePileup = true; - if (stratifiedContexts != null) { // the old UG SNP-only path through the annotations - final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); - if ( context != null ) - pileup = context.getBasePileup(); + if ( stratifiedPerReadAlleleLikelihoodMap != null ) { + final PerReadAlleleLikelihoodMap likelihoodMap = stratifiedPerReadAlleleLikelihoodMap.get(genotype.getSampleName()); + if ( likelihoodMap != null && !likelihoodMap.isEmpty() ) { + fillQualsFromLikelihoodMap(vc.getAlleles(), vc.getStart(), likelihoodMap, refQuals, altQuals); + usePileup = false; + } } - if (stratifiedPerReadAlleleLikelihoodMap != null ) - indelLikelihoodMap = stratifiedPerReadAlleleLikelihoodMap.get(genotype.getSampleName()); - if (indelLikelihoodMap != null && indelLikelihoodMap.isEmpty()) - indelLikelihoodMap = null; - // treat an empty likelihood map as a null reference - will simplify contract with fillQualsFromPileup - if (indelLikelihoodMap == null && pileup == null) - 
continue; - - fillQualsFromPileup(vc.getAlleles(), vc.getStart(), pileup, indelLikelihoodMap, refQuals, altQuals ); + // the old UG SNP-only path through the annotations + if ( usePileup && stratifiedContexts != null ) { + final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); + if ( context != null ) { + final ReadBackedPileup pileup = context.getBasePileup(); + if ( pileup != null ) + fillQualsFromPileup(vc.getAlleles(), pileup, refQuals, altQuals); + } + } } - if (refQuals.isEmpty() && altQuals.isEmpty()) + + if ( refQuals.isEmpty() && altQuals.isEmpty() ) return null; final MannWhitneyU mannWhitneyU = new MannWhitneyU(useDithering); @@ -136,18 +140,89 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR // we are testing that set1 (the alt bases) have lower quality scores than set2 (the ref bases) final Pair testResults = mannWhitneyU.runOneSidedTest(MannWhitneyU.USet.SET1); - final Map map = new HashMap(); + final Map map = new HashMap<>(); if (!Double.isNaN(testResults.first)) map.put(getKeyNames().get(0), String.format("%.3f", testResults.first)); return map; } - protected abstract void fillQualsFromPileup(final List alleles, - final int refLoc, - final ReadBackedPileup readBackedPileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, - final List altQuals); + private void fillQualsFromPileup(final List alleles, + final ReadBackedPileup pileup, + final List refQuals, + final List altQuals) { + for ( final PileupElement p : pileup ) { + if ( isUsableBase(p) ) { + final Double value = getElementForPileupElement(p); + if ( value == null ) + continue; + + if ( alleles.get(0).equals(Allele.create(p.getBase(), true)) ) + refQuals.add(value); + else if ( alleles.contains(Allele.create(p.getBase())) ) + altQuals.add(value); + } + } + } + + private void fillQualsFromLikelihoodMap(final List alleles, + final int refLoc, + final PerReadAlleleLikelihoodMap likelihoodMap, + final 
List refQuals, + final List altQuals) { + for ( final Map.Entry> el : likelihoodMap.getLikelihoodReadMap().entrySet() ) { + final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); + if ( ! a.isInformative() ) + continue; // read is non-informative + + final GATKSAMRecord read = el.getKey(); + if ( isUsableRead(read, refLoc) ) { + final Double value = getElementForRead(read, refLoc, a); + if ( value == null ) + continue; + + if ( a.getMostLikelyAllele().isReference() ) + refQuals.add(value); + else if ( alleles.contains(a.getMostLikelyAllele()) ) + altQuals.add(value); + } + } + } + + /** + * Get the element for the given read at the given reference position + * + * @param read the read + * @param refLoc the reference position + * @param mostLikelyAllele the most likely allele for this read + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc, final MostLikelyAllele mostLikelyAllele) { + return getElementForRead(read, refLoc); + } + + /** + * Get the element for the given read at the given reference position + * + * @param read the read + * @param refLoc the reference position + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected abstract Double getElementForRead(final GATKSAMRecord read, final int refLoc); + + // TODO -- until the ReadPosRankSumTest stops treating these differently, we need to have separate methods for GATKSAMRecords and PileupElements. Yuck. 
+ + /** + * Get the element for the given read at the given reference position + * + * By default this function returns null, indicating that the test doesn't support the old style of pileup calculations + * + * @param p the pileup element + * @return a Double representing the element to be used in the rank sum test, or null if it should not be used + */ + protected Double getElementForPileupElement(final PileupElement p) { + // does not work in pileup mode + return null; + } /** * Can the base in this pileup element be used in comparative tests between ref / alt bases? @@ -157,30 +232,33 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR * @param p the pileup element to consider * @return true if this base is part of a meaningful read for comparison, false otherwise */ - public static boolean isUsableBase(final PileupElement p) { - return isUsableBase(p, false); + protected boolean isUsableBase(final PileupElement p) { + return !(p.isDeletion() || + p.getMappingQual() == 0 || + p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE || // need the unBAQed quality score here + p.getRead().isReducedRead() ); } /** - * Can the base in this pileup element be used in comparative tests between ref / alt bases? + * Can the read be used in comparative tests between ref / alt bases? * - * @param p the pileup element to consider - * @param allowDeletions if true, allow p to be a deletion base - * @return true if this base is part of a meaningful read for comparison, false otherwise + * @param read the read to consider + * @param refLoc the reference location + * @return true if this read is meaningful for comparison, false otherwise */ - public static boolean isUsableBase(final PileupElement p, final boolean allowDeletions) { - return !((! 
allowDeletions && p.isDeletion()) || - p.getMappingQual() == 0 || - p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || - ((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); // need the unBAQed quality score here + protected boolean isUsableRead(final GATKSAMRecord read, final int refLoc) { + return !( read.getMappingQuality() == 0 || + read.getMappingQuality() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || + read.isReducedRead() ); } /** * Initialize the rank sum test annotation using walker and engine information. Right now this checks to see if * engine randomization is turned off, and if so does not dither. - * @param walker - * @param toolkit - * @param headerLines + * @param walker the walker + * @param toolkit the GATK engine + * @param headerLines the header lines */ public void initialize ( AnnotatorCompatible walker, GenomeAnalysisEngine toolkit, Set headerLines ) { useDithering = ! toolkit.getArguments().disableDithering; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java index 6ce4aab49..f4528b16f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/annotator/ReadPosRankSumTest.java @@ -51,17 +51,13 @@ import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; import org.broadinstitute.variant.vcf.VCFHeaderLineType; import org.broadinstitute.variant.vcf.VCFInfoHeaderLine; import org.broadinstitute.sting.utils.pileup.PileupElement; 
-import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import org.broadinstitute.variant.variantcontext.Allele; import java.util.*; @@ -75,63 +71,44 @@ import java.util.*; */ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotation { - public List getKeyNames() { - return Arrays.asList("ReadPosRankSum"); - } + @Override + public List getKeyNames() { return Arrays.asList("ReadPosRankSum"); } + @Override public List getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ReadPosRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. Ref read position bias")); } - protected void fillQualsFromPileup(final List allAlleles, - final int refLoc, - final ReadBackedPileup pileup, - final PerReadAlleleLikelihoodMap alleleLikelihoodMap, - final List refQuals, final List altQuals) { + @Override + protected Double getElementForRead(final GATKSAMRecord read, final int refLoc) { + final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); + if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) + return null; - if (alleleLikelihoodMap == null) { - // use old UG SNP-based version if we don't have per-read allele likelihoods - for ( final PileupElement p : pileup ) { - if ( isUsableBase(p) && p.getRead().getCigar() != null ) { - int readPos = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); - - readPos = getFinalReadPosition(p.getRead(),readPos); - - if ( allAlleles.get(0).equals(Allele.create(p.getBase(), true)) ) { - refQuals.add((double)readPos); - } else if ( allAlleles.contains(Allele.create(p.getBase()))) { - altQuals.add((double)readPos); - } - } - } - return; - } - - for (Map.Entry> el : 
alleleLikelihoodMap.getLikelihoodReadMap().entrySet()) { - final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()); - if (! a.isInformative() ) - continue; // read is non-informative - - final GATKSAMRecord read = el.getKey(); - if ( read.getSoftStart() + read.getCigar().getReadLength() <= refLoc ) { // make sure the read actually covers the requested ref loc - continue; - } - final int offset = ReadUtils.getReadCoordinateForReferenceCoordinate( read.getSoftStart(), read.getCigar(), refLoc, ReadUtils.ClippingTail.RIGHT_TAIL, true ); - if ( offset == ReadUtils.CLIPPING_GOAL_NOT_REACHED || read.getCigar() == null ) - continue; - int readPos = AlignmentUtils.calcAlignmentByteArrayOffset( read.getCigar(), offset, false, 0, 0 ); - final int numAlignedBases = AlignmentUtils.getNumAlignedBasesCountingSoftClips( read ); - if (readPos > numAlignedBases / 2) - readPos = numAlignedBases - (readPos + 1); - - if (a.getMostLikelyAllele().isReference()) - refQuals.add((double)readPos); - else if (allAlleles.contains(a.getMostLikelyAllele())) - altQuals.add((double)readPos); - } + int readPos = AlignmentUtils.calcAlignmentByteArrayOffset( read.getCigar(), offset, false, 0, 0 ); + final int numAlignedBases = AlignmentUtils.getNumAlignedBasesCountingSoftClips( read ); + if (readPos > numAlignedBases / 2) + readPos = numAlignedBases - (readPos + 1); + return (double)readPos; } - int getFinalReadPosition(GATKSAMRecord read, int initialReadPosition) { + @Override + protected Double getElementForPileupElement(final PileupElement p) { + final int offset = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0); + return (double)getFinalReadPosition(p.getRead(), offset); + } + + @Override + protected boolean isUsableBase(final PileupElement p) { + return super.isUsableBase(p) && p.getRead().getCigar() != null; + } + + @Override + protected boolean isUsableRead(final GATKSAMRecord read, final int refLoc) { + return 
super.isUsableRead(read, refLoc) && read.getSoftStart() + read.getCigar().getReadLength() > refLoc; + } + + private int getFinalReadPosition(final GATKSAMRecord read, final int initialReadPosition) { final int numAlignedBases = getNumAlignedBases(read); int readPos = initialReadPosition; @@ -141,7 +118,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return readPos; } - int getNumClippedBasesAtStart(SAMRecord read) { + + private int getNumClippedBasesAtStart(final SAMRecord read) { // compute total number of clipped bases (soft or hard clipped) // check for hard clips (never consider these bases): final Cigar c = read.getCigar(); @@ -151,8 +129,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio if (first.getOperator() == CigarOperator.H) { numStartClippedBases = first.getLength(); } - byte[] unclippedReadBases = read.getReadBases(); - byte[] unclippedReadQuals = read.getBaseQualities(); + final byte[] unclippedReadBases = read.getReadBases(); + final byte[] unclippedReadQuals = read.getBaseQualities(); // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // and may leave a string of Q2 bases still hanging off the reads. 
@@ -167,11 +145,11 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio return numStartClippedBases; } - int getNumAlignedBases(SAMRecord read) { + private int getNumAlignedBases(final GATKSAMRecord read) { return read.getReadLength() - getNumClippedBasesAtStart(read) - getNumClippedBasesAtEnd(read); } - int getNumClippedBasesAtEnd(SAMRecord read) { + private int getNumClippedBasesAtEnd(final GATKSAMRecord read) { // compute total number of clipped bases (soft or hard clipped) // check for hard clips (never consider these bases): final Cigar c = read.getCigar(); @@ -181,8 +159,8 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio if (last.getOperator() == CigarOperator.H) { numEndClippedBases = last.getLength(); } - byte[] unclippedReadBases = read.getReadBases(); - byte[] unclippedReadQuals = read.getBaseQualities(); + final byte[] unclippedReadBases = read.getReadBases(); + final byte[] unclippedReadQuals = read.getBaseQualities(); // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // and may leave a string of Q2 bases still hanging off the reads. 
@@ -193,11 +171,6 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio break; } - return numEndClippedBases; } - - int getOffsetFromClippedReadStart(SAMRecord read, int offset) { - return offset - getNumClippedBasesAtStart(read); - } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java new file mode 100644 index 000000000..c8c5eae0b --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariates.java @@ -0,0 +1,587 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ +package org.broadinstitute.sting.gatk.walkers.bqsr; + +import com.google.java.contract.Requires; +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Input; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.CommandLineGATK; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.RodWalker; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; +import org.broadinstitute.sting.utils.help.HelpConstants; +import org.broadinstitute.sting.utils.recalibration.RecalUtils; +import org.broadinstitute.sting.utils.recalibration.RecalibrationReport; +import org.broadinstitute.sting.utils.recalibration.BaseRecalibration; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.LinkedHashMap; +import java.util.Map; + + +/** + * Tool to analyze and evaluate base recalibration ables. + *

+ * For now it generates a plot report to assess the quality of a recalibration. + * + *

Input

+ * + * The tool can take up to three different sets of recalibration tables. + * The resulting plots will be overlaid on top of each other to make + * comparisons easy. + * + * + * + * + * + * + * + * + * + * + * + * + * + *
SetArgumentLabelColorDescription
Original-beforeBEFOREMaroon1First pass recalibration + * tables obtained from applying {@link BaseRecalibration} + * on the original alignment.
Recalibrated-afterAFTERBlueSecond pass recalibration tables + * results from the application of {@link BaseRecalibration} + * on the alignment recalibrated using the first pass tables
Input-BQSRBQSRBlackAny recalibration table without a specific role
+ *
+ * + * You need to specify one set at least. Multiple sets need to have the same values for the following parameters: + *

+ * covariate (order is not important), no_standard_covs, run_without_dbsnp, solid_recal_mode, + * solid_nocall_strategy, mismatches_context_size, mismatches_default_quality, deletions_default_quality, + * insertions_default_quality, maximum_cycle_value, low_quality_tail, default_platform, force_platform, + * quantizing_levels and binary_tag_name + *

Output

+ * + * Currently this tool generates two outputs: + * + *
+ *
-plots my-report.pdf
+ *
A pdf document that encloses plots to assess the quality of the recalibration.
+ *
-csv my-report.csv
+ *
A csv file that contains a table with all the data required to generate those plots.
+ *
+ * + * You need to specify at least one of them. + * + *

Other Arguments

+ * + *

-ignoreLMT, --ignoreLastModificationTimes

+ * + * when set, no warning message will be displayed if the -before recalibration table file is older than the -after one. + * + *

Examples

+ * + * + *

Plot a single recalibration table

+ *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -BQSR myrecal.table \
+ *      -plots BQSR.pdf
+ * 
+ * + *

Plot before (first pass) and after (second pass) recalibration table to compare them

+ * + *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -before recal2.table \
+ *      -after recal3.table \
+ *      -plots recalQC.pdf
+ * 
+ * + *

Plot up to three recalibration tables for comparison

+ * + *
+ *
+ * # You can ignore the before/after semantics completely if you like (if you do add -ignoreLMT
+ * # to avoid a possible warning), but all tables should have been generated using the same parameters.
+ *
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -ignoreLMT \
+ *      -BQSR recal1.table \   # you can discard any two
+ *      -before recal2.table \
+ *      -after recal3.table \
+ *      -plots myrecals.pdf
+ * 
+ * + *

Full BQSR quality assessment pipeline

+ * + *
+ * # Generate the first pass recalibration table file.
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T BaseRecalibrator \
+ *      -R myreference.fasta \
+ *      -I myinput.bam \
+ *      -knownSites bundle/my-trusted-snps.vcf \ # optional but recommendable
+ *      -knownSites bundle/my-trusted-indels.vcf \ # optional but recommendable
+ *      ... other options
+ *      -o firstpass.table
+ *
+ * # Generate the second pass recalibration table file.
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T BaseRecalibrator \
+ *      -BQSR firstpass.table \
+ *      -R myreference.fasta \
+ *      -I myinput.bam \
+ *      -knownSites bundle/my-trusted-snps.vcf \
+ *      -knownSites bundle/my-trusted-indels.vcf \
+ *      ... other options \
+ *      -o secondpass.table
+ *
+ * # Finally generate the plots report and also keep a copy of the csv (optional).
+ * java -jar GenomeAnalysisTK.jar \
+ *      -T AnalyzeCovariates \
+ *      -R myreference.fasta \
+ *      -before firstpass.table \
+ *      -after secondpass.table \
+ *      -csv BQSR.csv \ # optional
+ *      -plots BQSR.pdf
+ * 
+ * + * @author Valentin Ruano-Rubio <valentin@broadinstitute.org> + * @version 6/16/2013 + * @since 2.6 + */ +@DocumentedGATKFeature(groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class}) +public final class AnalyzeCovariates extends RodWalker { + + + // Constants on option short names that are used in some error/warning messages: + + static final String CSV_ARG_SHORT_NAME = "csv"; + static final String PDF_ARG_SHORT_NAME = "plots"; + static final String BEFORE_ARG_SHORT_NAME = "before"; + static final String AFTER_ARG_SHORT_NAME = "after"; + + /** + * File containing the recalibration tables from the first pass. + */ + @Input(shortName=BEFORE_ARG_SHORT_NAME,fullName="beforeReportFile", doc = "file containing the BQSR first-pass report file",required = false) + protected File beforeFile = null; + + /** + * File containing the recalibration tables from the second pass. + */ + @Input(shortName=AFTER_ARG_SHORT_NAME, fullName="afterReportFile", doc = "file containing the BQSR second-pass report file",required = false) + protected File afterFile = null; + + /** + * If true, it won't show a warning if the last-modification time of the before and after input files suggest that they have been reversed. + */ + @Argument(shortName="ignoreLMT", fullName="ignoreLastModificationTimes", doc= "do not emit warning messages related to suspicious last modification time order of inputs", required = false) + protected boolean ignoreLastModificationTime = false; + + /** + * Output report file name. + */ + @Output(shortName=PDF_ARG_SHORT_NAME, fullName="plotsReportFile" ,doc = "location of the output report", required = false) + protected File pdfFile = null; + + /** + * Output csv file name. + */ + @Output(shortName=CSV_ARG_SHORT_NAME,fullName="intermediateCsvFile" ,doc = "location of the csv intermediate file", required = false) + protected File csvFile = null; + + /** + * Convenience reference to the RECAL_BQSR_FILE argument value. + *

+ * This field value is resolved by {@link #initialize()}. + */ + protected File bqsrFile = null; + + /** + * Checks inputs and argument values. + *

+ * Notice that this routine will not validate the content of files. It may have some minor side effects as + * the output of warning messages back to the user. + * + * @throw IllegalStateException there is some required argument value that has not been loaded yet. + * @throw UserException if there is some error caused by or under the end user's control. + */ + private void checkArgumentsValues() { + checkInputReportFile("BQSR",bqsrFile); + checkInputReportFile("before",beforeFile); + checkInputReportFile("after",afterFile); + if (bqsrFile == null && beforeFile == null && afterFile == null) { + throw new UserException("you must provide at least one recalibration report file " + + "(arguments -BQSR, -" + BEFORE_ARG_SHORT_NAME + " or -" + AFTER_ARG_SHORT_NAME); + } + + checkOutputFile(PDF_ARG_SHORT_NAME,pdfFile); + checkOutputFile(CSV_ARG_SHORT_NAME, csvFile); + checkInputReportFileLMT(beforeFile,afterFile); + checkOutputRequested(); + } + + /** + * Checks whether the last-modification-time of the inputs is consistent with their relative roles. + * + * This routine does not thrown an exception but may output a warning message if inconsistencies are spotted. + * + * @param beforeFile the before report file. + * @param afterFile the after report file. + */ + private void checkInputReportFileLMT(final File beforeFile, final File afterFile) { + + if (ignoreLastModificationTime || beforeFile == null || afterFile == null) { + return; // nothing to do here + } else if (beforeFile.lastModified() > afterFile.lastModified()) { + Utils.warnUser("Last modification timestamp for 'Before' and 'After'" + + "recalibration reports are in the wrong order. Perhaps, have they been swapped?"); + } + } + + /** + * Checks that at least one output was requested. + * + * @throw UserException if no output was requested. 
+ */ + private void checkOutputRequested() { + if (pdfFile == null && csvFile == null) { + throw new UserException("you need to request at least one output:" + + " the intermediate csv file (-" + CSV_ARG_SHORT_NAME + " FILE)" + + " or the final plot file (-" + PDF_ARG_SHORT_NAME + " FILE)."); + } + } + + /** + * Checks the value provided to input file arguments. + * + * @throw UserException if there is any problem cause by or under the end user's control + * + * @param name command line argument short name. + * @param value the argument value. + */ + private void checkInputReportFile(final String name,final File value) { + if (value == null) { + return; + } else if (!value.exists()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' does not exist or is unreachable"); + } else if (!value.isFile()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' is not a regular file"); + } else if (!value.canRead()) { + throw new UserException.BadArgumentValue(name, "input report '" + + value + "' cannot be read"); + } + } + + /** + * Checks the value provided for output arguments. + * + * @throw UserException if there is any problem cause by or under the end user's control + * + * @param name command line argument short name. + * @param value the argument value. 
+ */ + private void checkOutputFile(final String name, final File value) { + if (value == null) { + return; + } + if (value.exists() && !value.isFile()) { + throw new UserException.BadArgumentValue(name, "the output file location '" + + value + "' exists as not a file"); + } + final File parent = value.getParentFile(); + if (parent == null) { + return; + } + if (!parent.exists()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' does not exists or is unreachable"); + } else if (!parent.isDirectory()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' is not a directory"); + } else if (!parent.canWrite()) { + throw new UserException.BadArgumentValue(name, "the output file parent directory '" + + parent + "' cannot be written"); + } + + } + + /** + * Generates the plots using the external R script. + * + *

+ * If plotsFile is null, it does not perform any plotting. + * + * @param csvFile the intermediary csv file. + * @param plotsFile the output plot location. + */ + private void generatePlots(final File csvFile, final Map reportFiles, final File plotsFile) { + + if (plotsFile == null) { + return; + } + logger.info("Generating plots file '" + plotsFile + "'"); + final File exampleReportFile = reportFiles.values().iterator().next(); + RecalUtils.generatePlots(csvFile,exampleReportFile,plotsFile); + } + + @Override + public void initialize() { + super.initialize(); + bqsrFile = getToolkit().getArguments().BQSR_RECAL_FILE; + checkArgumentsValues(); + final Map reportFiles = buildReportFileMap(); + final Map reports = buildReportMap(reportFiles); + checkReportConsistency(reports); + final File csvFile = resolveCsvFile(); + generateCsvFile(csvFile,reports); + final File plotFile = resolvePlotFile(); + generatePlots(csvFile, reportFiles, plotFile); + } + + /** + * Returns the plot output file + * @return might be null if the user has not indicated and output file. + */ + private File resolvePlotFile() { + return pdfFile; + } + + /** + * Generates the intermediary Csv file. + * + * @param csvFile where to write the file. + * @param reports the reports to be included. + */ + private void generateCsvFile(final File csvFile, final Map reports) { + try { + logger.info("Generating csv file '" + csvFile + "'"); + RecalUtils.generateCsv(csvFile, reports); + } catch (FileNotFoundException e) { + throw new UserException( + String.format("There is a problem creating the intermediary Csv file '%s': %s", + csvFile,e.getMessage()),e); + } + } + + /** + * Checks whether multiple input recalibration report files argument values are consistent (equal). + * + * @param reports map with report to verify. + * + * @throw UserException if there is any inconsistency. 
+ */ + private void checkReportConsistency(final Map reports) { + final Map.Entry[] reportEntries = + reports.entrySet().toArray((Map.Entry[]) new Map.Entry[reports.size()]); + + final Map.Entry exampleEntry = reportEntries[0]; + + for (int i = 1; i < reportEntries.length; i++) { + final Map diffs = exampleEntry.getValue().getRAC().compareReportArguments( + reportEntries[i].getValue().getRAC(),exampleEntry.getKey(),reportEntries[i].getKey()); + if (diffs.size() != 0) { + throw new UserException.IncompatibleRecalibrationTableParameters("There are differences in relevant arguments of" + + " two or more input recalibration reports. Please make sure" + + " they have been created using the same recalibration parameters." + + " " + Utils.join("// ", reportDifferencesStringArray(diffs))); + } + } + } + + + /** + * Creates a map with all input recalibration files indexed by their "role". + *

+ * The key is the role and the value the corresponding report file. + *

+ * Roles: "Before" (recalibration), "After" (recalibration), "BQSR" (the tool standard argument recalibration file) + * + * @return never null + */ + private Map buildReportFileMap() { + final Map reports = new LinkedHashMap<>(3); + if (bqsrFile != null) { + reports.put("BQSR",bqsrFile); + } + if (beforeFile != null) { + reports.put("Before",beforeFile); + } + if (afterFile != null) { + reports.put("After",afterFile); + } + return reports; + } + + /** + * Transforms a recalibration file map into a report object map. + * + * @param reportFileMap the file map to transforms. + * @return never null, a new map with the same size as + * reportFileMap and the same key set. + */ + @Requires("reportFileMap != null") + private Map buildReportMap(final Map reportFileMap) { + final Map reports = new LinkedHashMap<>(reportFileMap.size()); + for (final Map.Entry e : reportFileMap.entrySet()) { + reports.put(e.getKey(),new RecalibrationReport(e.getValue())); + } + return reports; + } + + /** + * Generates a flatter String array representation of recalibration argument differences. + * @param diffs the differences to represent. + * + * @return never null, an array of the same length as the size of the input diffs. + */ + @Requires("diffs != null") + private String[] reportDifferencesStringArray(final Map diffs) { + final String[] result = new String[diffs.size()]; + int i = 0; + for (final Map.Entry e : diffs.entrySet()) { + result[i++] = capitalize(e.getKey()) + ": " + e.getValue(); + } + return result; + } + + /** + * Returns the input string capitalizing the first letter. + * + * @param str the string to capitalize + * @return never null. + */ + @Requires("str != null") + private String capitalize(final String str) { + if (str.isEmpty()) { + return str; + } else { + return Character.toUpperCase(str.charAt(0)) + str.substring(1); + } + } + + /** + * Returns the csv file to use. + *

+ * This is the the one specified by the user if any or a temporary file + * that will be deleted as soon as the VM exists by default. + * + * @return never null. + */ + private File resolveCsvFile() { + if (csvFile != null) { + return csvFile; + } else { + try { + final File result = File.createTempFile("AnalyzeCovariates", ".csv"); + result.deleteOnExit(); + return result; + } catch (IOException e) { + throw new UserException("Could not create temporary Csv file",e); + } + } + } + + /** + * Always return true, forcing the immediate termination of the travesal. + * @return + */ + @Override + public boolean isDone() { + return true; + } + + /** + * {@inheritDoc} + */ + @Override + public None reduceInit() { + return new None(); + } + + /** + * Is not supposed to ever be called, thus it always results in an exception. + * + * @throws IllegalStateException always. + */ + @Override + public None reduce(None value, None sum) { + throw new IllegalStateException("AnalyzeCovariates reduce method is not supposed to be invoked ever"); + } + + + /** + * Is not supposed to ever be called, thus it always results in an exception. + * + * @throws IllegalStateException always. + */ + @Override + public None map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + throw new IllegalStateException("AnalyzeCovariates map method is not supposed to be invoked ever"); + } + + /** + * Dummy map and reduce types for the {@link AnalyzeCovariates} tool that in fact does not do any traversal. 
+ */ + protected static class None { + private None() { + } + } +} + + diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java index ad97dc008..d6f0e16e8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRGatherer.java @@ -92,18 +92,6 @@ public class BQSRGatherer extends Gatherer { generalReport.calculateQuantizedQualities(); - RecalibrationArgumentCollection RAC = generalReport.getRAC(); - if ( RAC.RECAL_PDF_FILE != null ) { - RAC.RECAL_TABLE_FILE = output; - if ( RAC.existingRecalibrationReport != null ) { - final RecalibrationReport originalReport = new RecalibrationReport(RAC.existingRecalibrationReport); - RecalUtils.generateRecalibrationPlot(RAC, originalReport.getRecalibrationTables(), generalReport.getRecalibrationTables(), generalReport.getCovariates()); - } - else { - RecalUtils.generateRecalibrationPlot(RAC, generalReport.getRecalibrationTables(), generalReport.getCovariates()); - } - } - generalReport.output(outputFile); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java index dde49b7db..3882b70fa 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/BaseRecalibrator.java @@ -61,6 +61,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.baq.BAQ; import org.broadinstitute.sting.utils.clipping.ReadClipper; import 
org.broadinstitute.sting.utils.collections.Pair; @@ -124,7 +125,7 @@ import java.util.List; * -R resources/Homo_sapiens_assembly18.fasta \ * -knownSites bundle/hg18/dbsnp_132.hg18.vcf \ * -knownSites another/optional/setOfSitesToMask.vcf \ - * -o recal_data.grp + * -o recal_data.table * */ @@ -366,9 +367,7 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche } protected static int[] calculateIsIndel( final GATKSAMRecord read, final EventType mode ) { - final byte[] readBases = read.getReadBases(); - final int[] indel = new int[readBases.length]; - Arrays.fill(indel, 0); + final int[] indel = new int[read.getReadBases().length]; int readPos = 0; for ( final CigarElement ce : read.getCigar().getCigarElements() ) { final int elementLength = ce.getLength(); @@ -383,21 +382,19 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche } case D: { - final int index = ( read.getReadNegativeStrandFlag() ? readPos : ( readPos > 0 ? readPos - 1 : readPos ) ); - indel[index] = ( mode.equals(EventType.BASE_DELETION) ? 1 : 0 ); + final int index = ( read.getReadNegativeStrandFlag() ? readPos : readPos - 1 ); + updateIndel(indel, index, mode, EventType.BASE_DELETION); break; } case I: { final boolean forwardStrandRead = !read.getReadNegativeStrandFlag(); if( forwardStrandRead ) { - indel[(readPos > 0 ? readPos - 1 : readPos)] = ( mode.equals(EventType.BASE_INSERTION) ? 1 : 0 ); - } - for (int iii = 0; iii < elementLength; iii++) { - readPos++; + updateIndel(indel, readPos - 1, mode, EventType.BASE_INSERTION); } + readPos += elementLength; if( !forwardStrandRead ) { - indel[(readPos < indel.length ? readPos : readPos - 1)] = ( mode.equals(EventType.BASE_INSERTION) ? 
1 : 0 ); + updateIndel(indel, readPos, mode, EventType.BASE_INSERTION); } break; } @@ -412,6 +409,12 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche return indel; } + private static void updateIndel(final int[] indel, final int index, final EventType mode, final EventType requiredMode) { + if ( mode == requiredMode && index >= 0 && index < indel.length ) + // protect ourselves from events at the start or end of the read (1D3M or 3M1D) + indel[index] = 1; + } + protected static double[] calculateFractionalErrorArray( final int[] errorArray, final byte[] baqArray ) { if(errorArray.length != baqArray.length ) { throw new ReviewedStingException("Array length mismatch detected. Malformed read?"); @@ -514,28 +517,13 @@ public class BaseRecalibrator extends ReadWalker implements NanoSche generateReport(); logger.info("...done!"); - if ( RAC.RECAL_PDF_FILE != null ) { - logger.info("Generating recalibration plots..."); - generatePlots(); - } - - logger.info("Processed: " + result + " reads"); + logger.info("BaseRecalibrator was able to recalibrate " + result + " reads"); } private RecalibrationTables getRecalibrationTable() { return recalibrationEngine.getFinalRecalibrationTables(); } - private void generatePlots() { - File recalFile = getToolkit().getArguments().BQSR_RECAL_FILE; - if (recalFile != null) { - RecalibrationReport report = new RecalibrationReport(recalFile); - RecalUtils.generateRecalibrationPlot(RAC, report.getRecalibrationTables(), getRecalibrationTable(), requestedCovariates); - } - else - RecalUtils.generateRecalibrationPlot(RAC, getRecalibrationTable(), requestedCovariates); - } - /** * go through the quality score table and use the # observations and the empirical quality score * to build a quality score histogram for quantization. 
Then use the QuantizeQual algorithm to diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java index 5a2cdc7a6..b9f16132c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/bqsr/RecalibrationArgumentCollection.java @@ -46,15 +46,17 @@ package org.broadinstitute.sting.gatk.walkers.bqsr; +import com.google.java.contract.Requires; import org.broad.tribble.Feature; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.report.GATKReportTable; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.StingException; import org.broadinstitute.sting.utils.recalibration.RecalUtils; import java.io.File; import java.io.PrintStream; -import java.util.Collections; -import java.util.List; +import java.util.*; /** * Created by IntelliJ IDEA. @@ -65,7 +67,7 @@ import java.util.List; * This set of arguments will also be passed to the constructor of every Covariate when it is instantiated. */ -public class RecalibrationArgumentCollection { +public class RecalibrationArgumentCollection implements Cloneable { /** * This algorithm treats every reference mismatch as an indication of error. However, real genetic variation is expected to mismatch the reference, @@ -87,21 +89,6 @@ public class RecalibrationArgumentCollection { public File RECAL_TABLE_FILE = null; public PrintStream RECAL_TABLE; - /** - * If not provided, then no plots will be generated (useful for queue scatter/gathering). - * However, we *highly* recommend that users generate these plots whenever possible for QC checking. 
- */ - @Output(fullName = "plot_pdf_file", shortName = "plots", doc = "The output recalibration pdf file to create", required = false, defaultToStdout = false) - public File RECAL_PDF_FILE = null; - - /** - * If not provided, then a temporary file is created and then deleted upon completion. - * For advanced users only. - */ - @Advanced - @Argument(fullName = "intermediate_csv_file", shortName = "intermediate", doc = "The intermediate csv file to create", required = false) - public File RECAL_CSV_FILE = null; - /** * Note that the --list argument requires a fully resolved and correct command-line to work. */ @@ -282,11 +269,147 @@ public class RecalibrationArgumentCollection { argumentsTable.set("quantizing_levels", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, QUANTIZING_LEVELS); argumentsTable.addRowID("recalibration_report", true); argumentsTable.set("recalibration_report", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, existingRecalibrationReport == null ? "null" : existingRecalibrationReport.getAbsolutePath()); - argumentsTable.addRowID("plot_pdf_file", true); - argumentsTable.set("plot_pdf_file", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, RECAL_PDF_FILE == null ? "null" : RECAL_PDF_FILE.getAbsolutePath()); argumentsTable.addRowID("binary_tag_name", true); argumentsTable.set("binary_tag_name", RecalUtils.ARGUMENT_VALUE_COLUMN_NAME, BINARY_TAG_NAME == null ? "null" : BINARY_TAG_NAME); return argumentsTable; } + /** + * Returns a map with the arguments that differ between this an + * another {@link RecalibrationArgumentCollection} instance. + *

+ * The key is the name of that argument in the report file. The value is a message + * that explains the difference to the end user. + *

+ * Thus, an empty map indicates that there are no differences between the two argument collections that + * are relevant to report comparison. + *

+ * This method should not throw any exception. + * + * @param other the argument-collection to compare against. + * @param thisRole the name used to refer to this RAC report that makes sense to the end user. + * @param otherRole the name used to refer to the other RAC report that makes sense to the end user. + * + * @return never null, but a zero-size collection if there are no differences. + */ + @Requires("other != null && thisRole != null && otherRole != null && !thisRole.equalsIgnoreCase(otherRole)") + Map compareReportArguments(final RecalibrationArgumentCollection other,final String thisRole, final String otherRole) { + final Map result = new LinkedHashMap<>(15); + compareRequestedCovariates(result, other, thisRole, otherRole); + compareSimpleReportArgument(result,"no_standard_covs", DO_NOT_USE_STANDARD_COVARIATES, other.DO_NOT_USE_STANDARD_COVARIATES, thisRole, otherRole); + compareSimpleReportArgument(result,"run_without_dbsnp",RUN_WITHOUT_DBSNP,other.RUN_WITHOUT_DBSNP,thisRole,otherRole); + compareSimpleReportArgument(result,"solid_recal_mode", SOLID_RECAL_MODE, other.SOLID_RECAL_MODE,thisRole,otherRole); + compareSimpleReportArgument(result,"solid_nocall_strategy", SOLID_NOCALL_STRATEGY, other.SOLID_NOCALL_STRATEGY,thisRole,otherRole); + compareSimpleReportArgument(result,"mismatches_context_size", MISMATCHES_CONTEXT_SIZE,other.MISMATCHES_CONTEXT_SIZE,thisRole,otherRole); + compareSimpleReportArgument(result,"mismatches_default_quality", MISMATCHES_DEFAULT_QUALITY, other.MISMATCHES_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"deletions_default_quality", DELETIONS_DEFAULT_QUALITY, other.DELETIONS_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"insertions_default_quality", INSERTIONS_DEFAULT_QUALITY, other.INSERTIONS_DEFAULT_QUALITY,thisRole,otherRole); + compareSimpleReportArgument(result,"maximum_cycle_value", MAXIMUM_CYCLE_VALUE, other.MAXIMUM_CYCLE_VALUE,thisRole,otherRole); + 
compareSimpleReportArgument(result,"low_quality_tail", LOW_QUAL_TAIL, other.LOW_QUAL_TAIL,thisRole,otherRole); + compareSimpleReportArgument(result,"default_platform", DEFAULT_PLATFORM, other.DEFAULT_PLATFORM,thisRole,otherRole); + compareSimpleReportArgument(result,"force_platform", FORCE_PLATFORM, other.FORCE_PLATFORM,thisRole,otherRole); + compareSimpleReportArgument(result,"quantizing_levels", QUANTIZING_LEVELS, other.QUANTIZING_LEVELS,thisRole,otherRole); + compareSimpleReportArgument(result,"binary_tag_name", BINARY_TAG_NAME, other.BINARY_TAG_NAME,thisRole,otherRole); + return result; + } + + + /** + * Compares the covariate report lists. + * + * @param diffs map where to annotate the difference. + * @param other the argument collection to compare against. + * @param thisRole the name for this argument collection that makes sense to the user. + * @param otherRole the name for the other argument collection that makes sense to the end user. + * + * @return true if a difference was found. + */ + @Requires("diffs != null && other != null && thisRole != null && otherRole != null") + private boolean compareRequestedCovariates(final Map diffs, + final RecalibrationArgumentCollection other, final String thisRole, final String otherRole) { + + final Set beforeNames = new HashSet<>(this.COVARIATES.length); + final Set afterNames = new HashSet<>(other.COVARIATES.length); + Utils.addAll(beforeNames, this.COVARIATES); + Utils.addAll(afterNames,other.COVARIATES); + final Set intersect = new HashSet<>(Math.min(beforeNames.size(),afterNames.size())); + intersect.addAll(beforeNames); + intersect.retainAll(afterNames); + + String diffMessage = null; + if (intersect.size() == 0) { // In practice this is not possible due to required covariates but... + diffMessage = String.format("There are no common covariates between '%s' and '%s'" + + " recalibrator reports. Covariates in '%s': {%s}. 
Covariates in '%s': {%s}.",thisRole,otherRole, + thisRole,Utils.join(", ",this.COVARIATES), + otherRole,Utils.join(",",other.COVARIATES)); + } else if (intersect.size() != beforeNames.size() || intersect.size() != afterNames.size()) { + beforeNames.removeAll(intersect); + afterNames.removeAll(intersect); + diffMessage = String.format("There are differences in the set of covariates requested in the" + + " '%s' and '%s' recalibrator reports. " + + " Exclusive to '%s': {%s}. Exclusive to '%s': {%s}.",thisRole,otherRole, + thisRole,Utils.join(", ",beforeNames), + otherRole,Utils.join(", ",afterNames)); + } + if (diffMessage != null) { + diffs.put("covariate",diffMessage); + return true; + } else { + return false; + } + } + + /** + * Annotates a map with any difference encountered in a simple value report argument that differs between this an + * another {@link RecalibrationArgumentCollection} instance. + *

+ * The key of the new entry would be the name of that argument in the report file. The value is a message + * that explains the difference to the end user. + *

+ * + *

+ * This method should not throw any exception. + * + * @param diffs where to annotate the differences. + * @param name the name of the report argument to compare. + * @param thisValue this argument collection value for that argument. + * @param otherValue the other collection value for that argument. + * @param thisRole the name used to refer to this RAC report that makes sense to the end user. + * @param otherRole the name used to refer to the other RAC report that makes sense to the end user. + * + * @param <T> the argument Object value type. + * + * @return true if a difference has been spotted, thus diff has been modified. + */ + private boolean compareSimpleReportArgument(final Map diffs, + final String name, final T thisValue, final T otherValue, final String thisRole, final String otherRole) { + if (thisValue == null && otherValue == null) { + return false; + } else if (thisValue != null && thisValue.equals(otherValue)) { + return false; + } else { + diffs.put(name, + String.format("differences between '%s' {%s} and '%s' {%s}.", + thisRole,thisValue == null ? "" : thisValue, + otherRole,otherValue == null ? "" : otherValue)); + return true; + } + + } + + /** + * Create a shallow copy of this argument collection. + * + * @return never null. 
+ */ + @Override + public RecalibrationArgumentCollection clone() { + try { + return (RecalibrationArgumentCollection) super.clone(); + } catch (CloneNotSupportedException e) { + throw new StingException("Unreachable code clone not supported thrown when the class " + + this.getClass().getName() + " is cloneable ",e); + } + } + } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java index 38b9e957b..ba2c2ae56 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/HeaderElement.java @@ -207,7 +207,7 @@ public class HeaderElement { public void removeInsertionToTheRight() { this.insertionsToTheRight--; if (insertionsToTheRight < 0) - throw new ReviewedStingException("Removed too many insertions, header is now negative!"); + throw new ReviewedStingException("Removed too many insertions, header is now negative at position " + location); } public boolean hasInsertionToTheRight() { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java index eb55701ae..e636f8f17 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReads.java @@ -273,8 +273,9 @@ public class ReduceReads extends ReadWalker, Redu int nCompressedReads = 0; - Object2LongOpenHashMap readNameHash; // This hash will keep the name of the original read the new compressed name (a number). 
+ private static int READ_NAME_HASH_DEFAULT_SIZE = 1000; Long nextReadNumber = 1L; // The next number to use for the compressed read name. + Object2LongOpenHashMap readNameHash; // This hash will keep the name of the original read the new compressed name (a number). ObjectSortedSet intervalList; @@ -313,7 +314,7 @@ public class ReduceReads extends ReadWalker, Redu knownSnpPositions = new ObjectAVLTreeSet(); GenomeAnalysisEngine toolkit = getToolkit(); - readNameHash = new Object2LongOpenHashMap(100000); // prepare the read name hash to keep track of what reads have had their read names compressed + this.resetReadNameHash(); // prepare the read name hash to keep track of what reads have had their read names compressed intervalList = new ObjectAVLTreeSet(); // get the interval list from the engine. If no interval list was provided, the walker will work in WGS mode if (toolkit.getIntervals() != null) @@ -335,6 +336,16 @@ public class ReduceReads extends ReadWalker, Redu } } + /** Initializer for {@link #readNameHash}. */ + private void resetReadNameHash() { + // If the hash grows large, subsequent clear operations can be very expensive, so trim the hash down if it grows beyond its default. + if (readNameHash == null || readNameHash.size() > READ_NAME_HASH_DEFAULT_SIZE) { + readNameHash = new Object2LongOpenHashMap(READ_NAME_HASH_DEFAULT_SIZE); + } else { + readNameHash.clear(); + } + } + /** * Takes in a read and prepares it for the SlidingWindow machinery by performing the * following optional clipping operations: @@ -471,7 +482,7 @@ public class ReduceReads extends ReadWalker, Redu // stash.compress(), the readNameHash can be cleared after the for() loop above. // The advantage of clearing the hash is that otherwise it holds all reads that have been encountered, // which can use a lot of memory and cause RR to slow to a crawl and/or run out of memory. 
- readNameHash.clear(); + this.resetReadNameHash(); } } else diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java index 8843d6270..5115a6777 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindow.java @@ -878,7 +878,7 @@ public class SlidingWindow { int stop = region.getStop() - windowHeaderStart; // make sure the bitset is complete given the region (it might not be in multi-sample mode) - if ( region.getStop() > markedSites.getStartLocation() + markedSites.getVariantSiteBitSet().length ) + if ( region.getStop() > markedSites.getStartLocation() + markedSites.getVariantSiteBitSet().length - 1 ) markSites(region.getStop()); CloseVariantRegionResult closeVariantRegionResult = closeVariantRegion(start, stop, knownSnpPositions); @@ -1199,7 +1199,7 @@ public class SlidingWindow { } // Special case for leading insertions before the beginning of the sliding read - if ( ReadUtils.readStartsWithInsertion(read).getFirst() && (readStart == headerStart || headerStart < 0) ) { + if ( (readStart == headerStart || headerStart < 0) && ReadUtils.readStartsWithInsertion(read.getCigar(), false) != null ) { // create a new first element to the window header with no bases added header.addFirst(new HeaderElement(readStart - 1)); // this allows the first element (I) to look at locationIndex - 1 when we update the header and do the right thing diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java index 4bd08294b..bde324e3c 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/DiagnoseTargets.java @@ -52,6 +52,7 @@ import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.GenomeLoc; @@ -110,6 +111,7 @@ import java.util.*; @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) @By(value = DataSource.READS) @PartitionBy(PartitionType.INTERVAL) +@Downsample(by = DownsampleType.NONE) public class DiagnoseTargets extends LocusWalker { private static final String AVG_INTERVAL_DP_KEY = "IDP"; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java index ebe2192b4..a6cbc1da3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/diagnosetargets/ThresHolder.java @@ -116,7 +116,7 @@ final class ThresHolder { @Argument(fullName = "quality_status_threshold", shortName = "stQ", doc = "The proportion of the loci needed for calling POOR_QUALITY", required = false) public double qualityStatusThreshold = 0.50; - @Output(fullName = "missing_intervals", shortName = "missing", doc ="Produces a file with the intervals that don't pass filters", required = false) + @Output(fullName = "missing_intervals", shortName = "missing", 
defaultToStdout = false, doc ="Produces a file with the intervals that don't pass filters", required = false) public PrintStream missingTargets = null; public final List locusMetricList = new LinkedList(); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java new file mode 100644 index 000000000..5e3da5f4f --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/Metrics.java @@ -0,0 +1,110 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.diagnostics.missing; + +/** + * Short one line description of the walker. + *

+ *

+ * [Long description of the walker] + *

+ *

+ *

+ *

Input

+ *

+ * [Description of the Input] + *

+ *

+ *

Output

+ *

+ * [Description of the Output] + *

+ *

+ *

Examples

+ *
+ *    java
+ *      -jar GenomeAnalysisTK.jar
+ *      -T [walker name]
+ *  
+ * + * @author Mauricio Carneiro + * @since 5/1/13 + */ +final class Metrics { + private double gccontent; + private double baseQual; + private double mapQual; + private int reads; + private int refs; + + void reads(int reads) {this.reads = reads;} + void refs(int refs) {this.refs = refs;} + + void gccontent(double gccontent) {this.gccontent = gccontent;} + void baseQual(double baseQual) {this.baseQual = baseQual;} + void mapQual(double mapQual) {this.mapQual = mapQual;} + + double gccontent() {return refs > 0 ? gccontent/refs : 0.0;} + double baseQual() {return reads > 0 ? baseQual/reads : 0.0;} + double mapQual() {return reads > 0 ? mapQual/reads : 0.0;} + + /** + * Combines two metrics + * + * @param value the other metric to combine + * @return itself, for simple reduce + */ + public Metrics combine(Metrics value) { + this.gccontent += value.gccontent; + this.baseQual += value.baseQual; + this.mapQual += value.mapQual; + this.reads += value.reads; + this.refs += value.refs; + + return this; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java new file mode 100644 index 000000000..d0db3ef98 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/diagnostics/missing/QualifyMissingIntervals.java @@ -0,0 +1,228 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.diagnostics.missing; + +import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.CommandLineGATK; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.report.GATKReport; +import org.broadinstitute.sting.gatk.walkers.By; +import org.broadinstitute.sting.gatk.walkers.DataSource; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; +import org.broadinstitute.sting.gatk.walkers.NanoSchedulable; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; +import org.broadinstitute.sting.utils.help.HelpConstants; +import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; +import org.broadinstitute.sting.utils.text.XReadLines; + +import java.io.File; +import 
java.io.FileNotFoundException; +import java.io.PrintStream; +import java.util.List; + +/** + * Walks along reference and calculates a few metrics for each interval. + * + * Metrics: + *
+ * <ul>
+ *     <li>Average Base Quality</li>
+ *     <li>Average Mapping Quality</li>
+ *     <li>GC Content</li>
+ *     <li>Position in the target</li>
+ *     <li>Coding Sequence / Intron</li>
+ *     <li>Length of the uncovered area</li>
+ * </ul>
+ *
+ * <h3>Input</h3>
+ * <p>
+ * A reference file (for GC content), the input bam file (for base and mapping quality calculation), the missing intervals (in the -L), the baits/targets used to sequence (in the -targets) and a bed file with the coding sequence intervals of the genome (in the -cds)
+ * </p>
+ *
+ * <h3>Output</h3>
+ * <p>
+ * GC content calculations per interval.
+ * </p>
+ *
+ * <h3>Example</h3>
+ * <pre>
+ * java -Xmx2g -jar GenomeAnalysisTK.jar \
+ *   -T QualifyMissingIntervals \
+ *   -R ref.fasta \
+ *   -I input.bam \
+ *   -o output.grp \
+ *   -L input.intervals \
+ *   -cds cds.intervals \
+ *   -targets targets.intervals
+ * </pre>
+ * + */ +@DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) +@By(DataSource.REFERENCE) +public final class QualifyMissingIntervals extends LocusWalker implements NanoSchedulable { + @Output + protected PrintStream out; + + @Argument(shortName = "targets", required = true) + public File targetsFile; + + @Argument(shortName = "cds", required = false) + public File cdsFile = null; + + GATKReport simpleReport; + GenomeLocSortedSet target; + GenomeLocSortedSet cds; + + public boolean isReduceByInterval() { + return true; + } + + public void initialize() { + simpleReport = GATKReport.newSimpleReport("QualifyMissingIntervals", "IN", "GC", "BQ", "MQ", "TP", "CD", "LN"); + final GenomeLocParser parser = getToolkit().getGenomeLocParser(); + target = new GenomeLocSortedSet(parser); + cds = new GenomeLocSortedSet(parser); + parseFile(targetsFile, target, parser); + if (cdsFile != null) + parseFile(cdsFile, cds, parser); + } + + public Metrics reduceInit() { + return new Metrics(); + } + + public Metrics map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + if (tracker == null) + return null; + + final Metrics metrics = new Metrics(); + final byte baseIndex = ref.getBase(); + final ReadBackedPileup pileup = context.getBasePileup(); + final int nBases = pileup.getNumberOfElements(); + + double baseQual = 0.0; + for (byte qual : pileup.getQuals()) { + baseQual += qual; + } + double mapQual = 0.0; + for (byte qual : pileup.getMappingQuals()) { + mapQual += qual; + } + + metrics.baseQual(baseQual); + metrics.mapQual(mapQual); + metrics.gccontent(baseIndex == 'C' || baseIndex == 'G' ? 
1.0 : 0.0); + metrics.reads(nBases); + metrics.refs(1); + + return metrics; + } + + @Override + public Metrics reduce(Metrics value, Metrics sum) { + return sum.combine(value); + } + + public void onTraversalDone(List> results) { + for (Pair r : results) { + GenomeLoc interval = r.getFirst(); + Metrics metrics = r.getSecond(); + simpleReport.addRow( + interval.toString(), + metrics.gccontent(), + metrics.baseQual(), + metrics.mapQual(), + getPositionInTarget(interval), + cds.overlaps(interval), + interval.size() + ); + } + simpleReport.print(out); + out.close(); + } + + private static GenomeLoc parseInterval(String s, GenomeLocParser parser) { + if (s.isEmpty()) { + return null; + } + String[] first = s.split(":"); + if (first.length == 2) { + String[] second = first[1].split("\\-"); + return parser.createGenomeLoc(first[0], Integer.decode(second[0]), Integer.decode(second[1])); + } else { + throw new UserException.BadInput("Interval doesn't parse correctly: " + s); + } + } + + private void parseFile(File file, GenomeLocSortedSet set, GenomeLocParser parser) { + try { + for (String s : new XReadLines(file) ) { + GenomeLoc interval = parseInterval(s, parser); + if (interval != null) + set.add(interval, true); + } + } catch (FileNotFoundException e) { + e.printStackTrace(); + } + } + + private int getPositionInTarget(GenomeLoc interval) { + final List hits = target.getOverlapping(interval); + int result = 0; + for (GenomeLoc hit : hits) { + result = interval.getStart() - hit.getStart(); // if there are multiple hits, we'll get the last one. 
+ } + return result; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java index c6e9ea379..0f3f7739d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java @@ -76,7 +76,8 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood private List alleleList = new ArrayList(); - protected IndelGenotypeLikelihoodsCalculationModel(UnifiedArgumentCollection UAC, Logger logger) { + protected IndelGenotypeLikelihoodsCalculationModel(final UnifiedArgumentCollection UAC, + final Logger logger) { super(UAC, logger); pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY, UAC.INDEL_GAP_CONTINUATION_PENALTY, UAC.OUTPUT_DEBUG_INDEL_INFO, UAC.pairHMM); @@ -85,10 +86,11 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood ignoreSNPAllelesWhenGenotypingIndels = UAC.IGNORE_SNP_ALLELES; } - protected static List computeConsensusAlleles(ReferenceContext ref, - Map contexts, - AlignmentContextUtils.ReadOrientation contextType, - GenomeLocParser locParser, UnifiedArgumentCollection UAC) { + protected static List computeConsensusAlleles(final ReferenceContext ref, + final Map contexts, + final AlignmentContextUtils.ReadOrientation contextType, + final GenomeLocParser locParser, + final UnifiedArgumentCollection UAC) { ConsensusAlleleCounter counter = new ConsensusAlleleCounter(locParser, true, UAC.MIN_INDEL_COUNT_FOR_GENOTYPING, UAC.MIN_INDEL_FRACTION_PER_SAMPLE); return counter.computeConsensusAlleles(ref, contexts, contextType); } diff --git 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java index ce5f94478..360f88e51 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/SNPGenotypeLikelihoodsCalculationModel.java @@ -147,9 +147,17 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC // if we only want variants, then we don't need to calculate genotype likelihoods if ( UAC.OutputMode == UnifiedGenotyperEngine.OUTPUT_MODE.EMIT_VARIANTS_ONLY ) return builder.make(); + // if user requires all PLs at all sites, add all possible alt alleles + else if (UAC.annotateAllSitesWithPLs) { + for ( final byte base : BaseUtils.BASES ) { + if ( base != refBase ) + alleles.add(Allele.create(base)); + } + } - // otherwise, choose any alternate allele (it doesn't really matter) - alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 1 : 0))); + else + // otherwise, choose any alternate allele (it doesn't really matter) + alleles.add(Allele.create(BaseUtils.baseIndexToSimpleBase(indexOfRefBase == 0 ? 
1 : 0))); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java index e346b10b7..f156468cc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedArgumentCollection.java @@ -52,6 +52,9 @@ import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.VariantContext; +import java.util.Collections; +import java.util.List; + public class UnifiedArgumentCollection extends StandardCallerArgumentCollection { @Argument(fullName = "genotype_likelihoods_model", shortName = "glm", doc = "Genotype likelihoods calculation model to employ -- SNP is the default option, while INDEL is also available for calling indels and BOTH is available for calling both together", required = false) @@ -82,7 +85,7 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection * The PairHMM implementation to use for -glm INDEL genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime. */ @Argument(fullName = "pair_hmm_implementation", shortName = "pairHMM", doc = "The PairHMM implementation to use for -glm INDEL genotype likelihood calculations", required = false) - public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.ORIGINAL; + public PairHMM.HMM_IMPLEMENTATION pairHMM = PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING; /** * The minimum confidence needed in a given base for it to be used in variant calling. 
Note that the base quality of a base @@ -95,6 +98,18 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection @Argument(fullName = "max_deletion_fraction", shortName = "deletions", doc = "Maximum fraction of reads with deletions spanning this locus for it to be callable [to disable, set to < 0 or > 1; default:0.05]", required = false) public Double MAX_DELETION_FRACTION = 0.05; + /** + * Advanced, experimental argument: if SNP likelihood model is specified, and if EMIT_ALL_SITES output mode is set, when we set this argument then we will also emit PLs at all sites. + * This will give a measure of reference confidence and a measure of which alt alleles are more plausible (if any). + * WARNINGS: + * - This feature will inflate VCF file size considerably. + * - All SNP ALT alleles will be emitted with corresponding 10 PL values. + * - An error will be emitted if EMIT_ALL_SITES is not set, or if anything other than diploid SNP model is used + */ + @Advanced + @Argument(fullName = "allSitePLs", shortName = "allSitePLs", doc = "Annotate all sites with PLs", required = false) + public boolean annotateAllSitesWithPLs = false; + // indel-related arguments /** * A candidate indel is genotyped (and potentially called) if there are this number of reads with a consensus indel at a site. 
@@ -247,7 +262,7 @@ public class UnifiedArgumentCollection extends StandardCallerArgumentCollection this.EXCLUDE_FILTERED_REFERENCE_SITES = uac.EXCLUDE_FILTERED_REFERENCE_SITES; this.IGNORE_LANE_INFO = uac.IGNORE_LANE_INFO; this.pairHMM = uac.pairHMM; - + this.annotateAllSitesWithPLs = uac.annotateAllSitesWithPLs; // todo- arguments to remove this.IGNORE_SNP_ALLELES = uac.IGNORE_SNP_ALLELES; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java index 3380efcc9..9f3368cf8 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperEngine.java @@ -83,6 +83,9 @@ public class UnifiedGenotyperEngine { public static final double HUMAN_SNP_HETEROZYGOSITY = 1e-3; public static final double HUMAN_INDEL_HETEROZYGOSITY = 1e-4; + private static final int SNP_MODEL = 0; + private static final int INDEL_MODEL = 1; + public enum OUTPUT_MODE { /** produces calls only at variant sites */ EMIT_VARIANTS_ONLY, @@ -165,6 +168,13 @@ public class UnifiedGenotyperEngine { filter.add(LOW_QUAL_FILTER_NAME); determineGLModelsToUse(); + + // do argument checking + if (UAC.annotateAllSitesWithPLs) { + if (!modelsToUse.contains(GenotypeLikelihoodsCalculationModel.Model.SNP)) + throw new IllegalArgumentException("Invalid genotype likelihood model specification: Only diploid SNP model can be used in conjunction with option allSitePLs"); + + } } /** @@ -436,7 +446,8 @@ public class UnifiedGenotyperEngine { bestGuessIsRef = false; } // if in GENOTYPE_GIVEN_ALLELES mode, we still want to allow the use of a poor allele - else if ( UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { + else if ( UAC.GenotypingMode == 
GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || + UAC.annotateAllSitesWithPLs) { myAlleles.add(alternateAllele); alleleCountsofMLE.add(AFresult.getAlleleCountAtMLE(alternateAllele)); } @@ -446,7 +457,7 @@ public class UnifiedGenotyperEngine { // note the math.abs is necessary because -10 * 0.0 => -0.0 which isn't nice final double phredScaledConfidence = - Math.abs(! bestGuessIsRef || UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES + Math.abs(! bestGuessIsRef || UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || UAC.annotateAllSitesWithPLs ? -10 * AFresult.getLog10PosteriorOfAFEq0() : -10 * AFresult.getLog10PosteriorOfAFGT0()); @@ -540,11 +551,6 @@ public class UnifiedGenotyperEngine { builder.attributes(attributes); VariantContext vcCall = builder.make(); - // if we are subsetting alleles (either because there were too many or because some were not polymorphic) - // then we may need to trim the alleles (because the original VariantContext may have had to pad at the end). 
- if ( myAlleles.size() != vc.getAlleles().size() && !limitedContext ) // limitedContext callers need to handle allele trimming on their own to keep their perReadAlleleLikelihoodMap alleles in sync - vcCall = GATKVariantContextUtils.reverseTrimAlleles(vcCall); - if ( annotationEngine != null && !limitedContext ) { // limitedContext callers need to handle annotations on their own by calling their own annotationEngine // Note: we want to use the *unfiltered* and *unBAQed* context for the annotations final ReadBackedPileup pileup = rawContext.getBasePileup(); @@ -553,6 +559,11 @@ public class UnifiedGenotyperEngine { vcCall = annotationEngine.annotateContext(tracker, refContext, stratifiedContexts, vcCall, perReadAlleleLikelihoodMap); } + // if we are subsetting alleles (either because there were too many or because some were not polymorphic) + // then we may need to trim the alleles (because the original VariantContext may have had to pad at the end). + if ( myAlleles.size() != vc.getAlleles().size() && !limitedContext ) // limitedContext callers need to handle allele trimming on their own to keep their perReadAlleleLikelihoodMap alleles in sync + vcCall = GATKVariantContextUtils.reverseTrimAlleles(vcCall); + return new VariantCallContext(vcCall, confidentlyCalled(phredScaledConfidence, PoFGT0)); } @@ -693,13 +704,13 @@ public class UnifiedGenotyperEngine { } private void determineGLModelsToUse() { - String modelPrefix = ""; if ( !UAC.GLmodel.name().contains(GPSTRING) && UAC.samplePloidy != GATKVariantContextUtils.DEFAULT_PLOIDY ) modelPrefix = GPSTRING; - if ( UAC.GLmodel.name().toUpperCase().contains("BOTH") ) { - modelPrefix += UAC.GLmodel.name().toUpperCase().replaceAll("BOTH",""); + // GGA mode => must initialize both the SNP and indel models + if ( UAC.GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES || + UAC.GLmodel.name().toUpperCase().contains("BOTH") ) { 
modelsToUse.add(GenotypeLikelihoodsCalculationModel.Model.valueOf(modelPrefix+"SNP")); modelsToUse.add(GenotypeLikelihoodsCalculationModel.Model.valueOf(modelPrefix+"INDEL")); } @@ -712,31 +723,24 @@ public class UnifiedGenotyperEngine { private List getGLModelsToUse(final RefMetaDataTracker tracker, final ReferenceContext refContext, final AlignmentContext rawContext) { - if ( UAC.GenotypingMode != GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) return modelsToUse; + if ( modelsToUse.size() != 2 ) + throw new IllegalStateException("GGA mode assumes that we have initialized both the SNP and indel models but found " + modelsToUse); + // if we're genotyping given alleles then we need to choose the model corresponding to the variant type requested - final List GGAmodel = new ArrayList(1); final VariantContext vcInput = getVCFromAllelesRod(tracker, refContext, rawContext.getLocation(), false, logger, UAC.alleles); - if ( vcInput == null ) - return GGAmodel; // no work to be done - if ( vcInput.isSNP() ) { - // use the SNP model unless the user chose INDEL mode only - if ( modelsToUse.size() == 2 || modelsToUse.get(0).name().endsWith("SNP") ) - GGAmodel.add(modelsToUse.get(0)); + if ( vcInput == null ) { + return Collections.emptyList(); // no work to be done + } else if ( vcInput.isSNP() ) { + return Collections.singletonList(modelsToUse.get(SNP_MODEL)); + } else if ( vcInput.isIndel() || vcInput.isMixed() ) { + return Collections.singletonList(modelsToUse.get(INDEL_MODEL)); + } else { + return Collections.emptyList(); // No support for other types yet } - else if ( vcInput.isIndel() || vcInput.isMixed() ) { - // use the INDEL model unless the user chose SNP mode only - if ( modelsToUse.size() == 2 ) - GGAmodel.add(modelsToUse.get(1)); - else if ( modelsToUse.get(0).name().endsWith("INDEL") ) - GGAmodel.add(modelsToUse.get(0)); - } - // No support for other types yet - - return GGAmodel; } /** diff --git 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java index 170b6e250..2ece18002 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/afcalc/DiploidExactAFCalc.java @@ -106,7 +106,7 @@ public abstract class DiploidExactAFCalc extends ExactAFCalc { alleles.add(vc.getReference()); alleles.addAll(chooseMostLikelyAlternateAlleles(vc, getMaxAltAlleles())); builder.alleles(alleles); - builder.genotypes(GATKVariantContextUtils.subsetDiploidAlleles(vc, alleles, false)); + builder.genotypes(GATKVariantContextUtils.subsetDiploidAlleles(vc, alleles, GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL)); return builder.make(); } else { return vc; @@ -352,6 +352,9 @@ public abstract class DiploidExactAFCalc extends ExactAFCalc { final List allelesToUse, final boolean assignGenotypes, final int ploidy) { - return GATKVariantContextUtils.subsetDiploidAlleles(vc, allelesToUse, assignGenotypes); + return allelesToUse.size() == 1 + ? GATKVariantContextUtils.subsetToRefOnly(vc, ploidy) + : GATKVariantContextUtils.subsetDiploidAlleles(vc, allelesToUse, + assignGenotypes ? 
GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN : GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL); } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 48972dfd5..d876a403b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -77,6 +77,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { private final static int NUM_PATHS_PER_GRAPH = 25; private static final int KMER_OVERLAP = 5; // the additional size of a valid chunk of sequence, used to string together k-mers private static final int GRAPH_KMER_STEP = 6; + private static final int GGA_MODE_ARTIFICIAL_COUNTS = 1000; private final int minKmer; private final int onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms; @@ -92,8 +93,8 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } @Override - protected List assemble(final List reads, final Haplotype refHaplotype) { - final List graphs = new LinkedList(); + protected List assemble(final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { + final List graphs = new LinkedList<>(); final int maxKmer = ReadUtils.getMaxReadLength(reads) - KMER_OVERLAP - 1; if( maxKmer < minKmer) { @@ -106,7 +107,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { continue; if ( debug ) logger.info("Creating de Bruijn graph for " + kmer + " kmer using " + reads.size() + " reads"); - DeBruijnGraph graph = createGraphFromSequences( reads, kmer, refHaplotype); + DeBruijnGraph graph = createGraphFromSequences(reads, kmer, refHaplotype, activeAlleleHaplotypes); if( graph != null ) { // graphs that fail during creation ( for example, because there are cycles in the 
reference graph ) will show up here as a null graph object // do a series of steps to clean up the raw assembly graph to make it analysis-ready if ( debugGraphTransformations ) graph.printGraph(new File("unpruned.dot"), pruneFactor); @@ -133,7 +134,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } @Requires({"reads != null", "kmerLength > 0", "refHaplotype != null"}) - protected DeBruijnGraph createGraphFromSequences( final List reads, final int kmerLength, final Haplotype refHaplotype ) { + protected DeBruijnGraph createGraphFromSequences( final List reads, final int kmerLength, final Haplotype refHaplotype, final List activeAlleleHaplotypes ) { final DeBruijnGraph graph = new DeBruijnGraph(kmerLength); final DeBruijnGraphBuilder builder = new DeBruijnGraphBuilder(graph); @@ -142,6 +143,11 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { // something went wrong, so abort right now with a null graph return null; + // add the artificial GGA haplotypes to the graph + if ( ! addGGAKmersToGraph(builder, activeAlleleHaplotypes) ) + // something went wrong, so abort right now with a null graph + return null; + // now go through the graph already seeded with the reference sequence and add the read kmers to it if ( ! 
addReadKmersToGraph(builder, reads) ) // some problem was detected adding the reads to the graph, return null to indicate we failed @@ -151,6 +157,28 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { return graph; } + /** + * Add the high-quality kmers from the artificial GGA haplotypes to the graph + * + * @param builder a debruijn graph builder to add the read kmers to + * @param activeAlleleHaplotypes a list of haplotypes to add to the graph for GGA mode + * @return true if we successfully added the read kmers to the graph without corrupting it in some way + */ + protected boolean addGGAKmersToGraph(final DeBruijnGraphBuilder builder, final List activeAlleleHaplotypes) { + + final int kmerLength = builder.getKmerSize(); + + for( final Haplotype haplotype : activeAlleleHaplotypes ) { + final int end = haplotype.length() - kmerLength; + for( int start = 0; start < end; start++ ) { + builder.addKmerPairFromSeqToGraph( haplotype.getBases(), start, GGA_MODE_ARTIFICIAL_COUNTS ); + } + } + + // always returns true now, but it's possible that we'd add kmers and decide we don't like the graph in some way + return true; + } + /** * Add the high-quality kmers from the reads to the graph * diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 419ea378f..04173b64f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -49,6 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine; 
import org.broadinstitute.sting.gatk.walkers.genotyper.GenotypeLikelihoodsCalculationModel; import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; @@ -71,7 +72,7 @@ public class GenotypingEngine { private final boolean DEBUG; private final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; - private final static List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied + private final static List noCall = new ArrayList<>(); // used to noCall all genotypes until the exact model is applied private final VariantAnnotatorEngine annotationEngine; private final MergeVariantsAcrossHaplotypes crossHaplotypeEventMerger; @@ -146,6 +147,7 @@ public class GenotypingEngine { final GenomeLoc refLoc, final GenomeLoc activeRegionWindow, final GenomeLocParser genomeLocParser, + final RefMetaDataTracker tracker, final List activeAllelesToGenotype ) { // sanity check input arguments if (UG_engine == null) throw new IllegalArgumentException("UG_Engine input can't be null, got "+UG_engine); @@ -162,8 +164,8 @@ public class GenotypingEngine { final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); // Walk along each position in the key set and create each event to be outputted - final Set calledHaplotypes = new HashSet(); - final List returnCalls = new ArrayList(); + final Set calledHaplotypes = new HashSet<>(); + final List returnCalls = new ArrayList<>(); for( final int loc : startPosKeySet ) { if( loc >= activeRegionWindow.getStart() && loc <= activeRegionWindow.getStop() ) { // genotyping an event inside this active region final List eventsAtThisLoc = getVCsAtThisLocation(haplotypes, loc, activeAllelesToGenotype); @@ -183,7 +185,7 @@ public class GenotypingEngine { if( eventsAtThisLoc.size() != mergedVC.getAlternateAlleles().size() ) { throw new ReviewedStingException("Record size mismatch! 
Something went wrong in the merging of alleles."); } - final Map mergeMap = new LinkedHashMap(); + final Map mergeMap = new LinkedHashMap<>(); mergeMap.put(null, mergedVC.getReference()); // the reference event (null) --> the reference allele for(int iii = 0; iii < mergedVC.getAlternateAlleles().size(); iii++) { mergeMap.put(eventsAtThisLoc.get(iii), mergedVC.getAlternateAllele(iii)); // BUGBUG: This is assuming that the order of alleles is the same as the priority list given to simpleMerge function @@ -204,13 +206,12 @@ public class GenotypingEngine { convertHaplotypeReadMapToAlleleReadMap( haplotypeReadMap, alleleMapper, 0.0 ) ); final Map stratifiedReadMap = filterToOnlyOverlappingReads( genomeLocParser, alleleReadMap_annotations, perSampleFilteredReadList, call ); - VariantContext annotatedCall = call; - if( annotatedCall.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! + VariantContext annotatedCall = annotationEngine.annotateContextForActiveRegion(tracker, stratifiedReadMap, call); + + if( call.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! 
annotatedCall = GATKVariantContextUtils.reverseTrimAlleles(annotatedCall); } - annotatedCall = annotationEngine.annotateContext(stratifiedReadMap, annotatedCall); - // maintain the set of all called haplotypes for ( final Allele calledAllele : call.getAlleles() ) calledHaplotypes.addAll(alleleMapper.get(calledAllele)); @@ -244,7 +245,7 @@ public class GenotypingEngine { if ( in_GGA_mode ) startPosKeySet.clear(); - cleanUpSymbolicUnassembledEvents( haplotypes ); + //cleanUpSymbolicUnassembledEvents( haplotypes ); // We don't make symbolic alleles so this isn't needed currently if ( !in_GGA_mode ) { // run the event merger if we're not in GGA mode final boolean mergedAnything = crossHaplotypeEventMerger.merge(haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc); @@ -267,7 +268,7 @@ public class GenotypingEngine { * @return the list of the sources of vcs in the same order */ private List makePriorityList(final List vcs) { - final List priorityList = new LinkedList(); + final List priorityList = new LinkedList<>(); for ( final VariantContext vc : vcs ) priorityList.add(vc.getSource()); return priorityList; } @@ -276,7 +277,7 @@ public class GenotypingEngine { final int loc, final List activeAllelesToGenotype) { // the overlapping events to merge into a common reference view - final List eventsAtThisLoc = new ArrayList(); + final List eventsAtThisLoc = new ArrayList<>(); if( activeAllelesToGenotype.isEmpty() ) { for( final Haplotype h : haplotypes ) { @@ -292,7 +293,7 @@ public class GenotypingEngine { if( compVC.getStart() == loc ) { int alleleCount = 0; for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - List alleleSet = new ArrayList(2); + List alleleSet = new ArrayList<>(2); alleleSet.add(compVC.getReference()); alleleSet.add(compAltAllele); final String vcSourceName = "Comp" + compCount + "Allele" + alleleCount; @@ -348,7 +349,7 @@ public class GenotypingEngine { final Map> perSampleFilteredReadList, final VariantContext call ) { - final Map 
returnMap = new LinkedHashMap(); + final Map returnMap = new LinkedHashMap<>(); final GenomeLoc callLoc = parser.createGenomeLoc(call); for( final Map.Entry sample : perSampleReadMap.entrySet() ) { final PerReadAlleleLikelihoodMap likelihoodMap = new PerReadAlleleLikelihoodMap(); @@ -384,7 +385,7 @@ public class GenotypingEngine { // TODO - split into input haplotypes and output haplotypes as not to share I/O arguments @Requires("haplotypes != null") protected static void cleanUpSymbolicUnassembledEvents( final List haplotypes ) { - final List haplotypesToRemove = new ArrayList(); + final List haplotypesToRemove = new ArrayList<>(); for( final Haplotype h : haplotypes ) { for( final VariantContext vc : h.getEventMap().getVariantContexts() ) { if( vc.isSymbolic() ) { @@ -407,7 +408,7 @@ public class GenotypingEngine { final Map> alleleMapper, final double downsamplingFraction ) { - final Map alleleReadMap = new LinkedHashMap(); + final Map alleleReadMap = new LinkedHashMap<>(); for( final Map.Entry haplotypeReadMapEntry : haplotypeReadMap.entrySet() ) { // for each sample final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); for( final Map.Entry> alleleMapperEntry : alleleMapper.entrySet() ) { // for each output allele @@ -430,7 +431,7 @@ public class GenotypingEngine { } protected static Map> createAlleleMapper( final Map mergeMap, final Map> eventMap ) { - final Map> alleleMapper = new LinkedHashMap>(); + final Map> alleleMapper = new LinkedHashMap<>(); for( final Map.Entry entry : mergeMap.entrySet() ) { alleleMapper.put(entry.getValue(), eventMap.get(new Event(entry.getKey()))); } @@ -441,100 +442,33 @@ public class GenotypingEngine { @Ensures({"result.size() == eventsAtThisLoc.size() + 1"}) protected static Map> createEventMapper( final int loc, final List eventsAtThisLoc, final List haplotypes ) { - final Map> eventMapper = new LinkedHashMap>(eventsAtThisLoc.size()+1); - VariantContext refVC = eventsAtThisLoc.get(0); 
// the genome loc is the only safe thing to pull out of this VC because ref/alt pairs might change reference basis - eventMapper.put(new Event(null), new ArrayList()); + final Map> eventMapper = new LinkedHashMap<>(eventsAtThisLoc.size()+1); + final Event refEvent = new Event(null); + eventMapper.put(refEvent, new ArrayList()); for( final VariantContext vc : eventsAtThisLoc ) { eventMapper.put(new Event(vc), new ArrayList()); } - final List undeterminedHaplotypes = new ArrayList(haplotypes.size()); for( final Haplotype h : haplotypes ) { - if( h.isArtificialHaplotype() && loc == h.getArtificialAllelePosition() ) { - final List alleles = new ArrayList(2); - alleles.add(h.getArtificialRefAllele()); - alleles.add(h.getArtificialAltAllele()); - final Event artificialVC = new Event( (new VariantContextBuilder()).source("artificialHaplotype") - .alleles(alleles) - .loc(refVC.getChr(), refVC.getStart(), refVC.getStart() + h.getArtificialRefAllele().length() - 1).make() ); - if( eventMapper.containsKey(artificialVC) ) { - eventMapper.get(artificialVC).add(h); - } - } else if( h.getEventMap().get(loc) == null ) { // no event at this location so let's investigate later - undeterminedHaplotypes.add(h); + if( h.getEventMap().get(loc) == null ) { + eventMapper.get(refEvent).add(h); } else { - boolean haplotypeIsDetermined = false; for( final VariantContext vcAtThisLoc : eventsAtThisLoc ) { if( h.getEventMap().get(loc).hasSameAllelesAs(vcAtThisLoc) ) { eventMapper.get(new Event(vcAtThisLoc)).add(h); - haplotypeIsDetermined = true; break; } } - - if( !haplotypeIsDetermined ) - undeterminedHaplotypes.add(h); } } - for( final Haplotype h : undeterminedHaplotypes ) { - Event matchingEvent = new Event(null); - for( final Map.Entry> eventToTest : eventMapper.entrySet() ) { - // don't test against the reference allele - if( eventToTest.getKey().equals(new Event(null)) ) - continue; - - // only try to disambiguate for alleles that have had haplotypes previously assigned above - if( 
eventToTest.getValue().isEmpty() ) - continue; - - final Haplotype artificialHaplotype = eventToTest.getValue().get(0); - if( isSubSetOf(artificialHaplotype.getEventMap(), h.getEventMap(), true) ) { - matchingEvent = eventToTest.getKey(); - break; - } - } - - eventMapper.get(matchingEvent).add(h); - } - return eventMapper; } - protected static boolean isSubSetOf(final Map subset, final Map superset, final boolean resolveSupersetToSubset) { - - for ( final Map.Entry fromSubset : subset.entrySet() ) { - final VariantContext fromSuperset = superset.get(fromSubset.getKey()); - if ( fromSuperset == null ) - return false; - - List supersetAlleles = fromSuperset.getAlternateAlleles(); - if ( resolveSupersetToSubset ) - supersetAlleles = resolveAlternateAlleles(fromSubset.getValue().getReference(), fromSuperset.getReference(), supersetAlleles); - - if ( !supersetAlleles.contains(fromSubset.getValue().getAlternateAllele(0)) ) - return false; - } - - return true; - } - - private static List resolveAlternateAlleles(final Allele targetReference, final Allele actualReference, final List currentAlleles) { - if ( targetReference.length() <= actualReference.length() ) - return currentAlleles; - - final List newAlleles = new ArrayList(currentAlleles.size()); - final byte[] extraBases = Arrays.copyOfRange(targetReference.getBases(), actualReference.length(), targetReference.length()); - for ( final Allele a : currentAlleles ) { - newAlleles.add(Allele.extend(a, extraBases)); - } - return newAlleles; - } - @Ensures({"result.size() == haplotypeAllelesForSample.size()"}) protected static List findEventAllelesInSample( final List eventAlleles, final List haplotypeAlleles, final List haplotypeAllelesForSample, final List> alleleMapper, final List haplotypes ) { if( haplotypeAllelesForSample.contains(Allele.NO_CALL) ) { return noCall; } - final List eventAllelesForSample = new ArrayList(); + final List eventAllelesForSample = new ArrayList<>(); for( final Allele a : 
haplotypeAllelesForSample ) { final Haplotype haplotype = haplotypes.get(haplotypeAlleles.indexOf(a)); for( int iii = 0; iii < alleleMapper.size(); iii++ ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index db184854b..87f1ae75c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -47,6 +47,10 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; +import net.sf.samtools.SAMFileWriter; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.arguments.DbsnpArgumentCollection; @@ -76,8 +80,6 @@ import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; -import org.broadinstitute.sting.utils.fragments.FragmentCollection; -import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.haplotype.*; import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; @@ -219,7 +221,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In */ @Advanced @Argument(fullName="annotation", shortName="A", doc="One or more specific annotations to apply to variant calls", required=false) - protected List annotationsToUse = new ArrayList(Arrays.asList(new String[]{"ClippingRankSumTest"})); + 
protected List annotationsToUse = new ArrayList<>(Arrays.asList(new String[]{"ClippingRankSumTest", "DepthPerSampleHC"})); /** * Which annotations to exclude from output in the VCF file. Note that this argument has higher priority than the -A or -G arguments, @@ -262,6 +264,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="kmerSize", shortName="kmerSize", doc="Kmer size to use in the read threading assembler", required = false) protected List kmerSizes = Arrays.asList(10, 25); + @Advanced + @Argument(fullName="dontIncreaseKmerSizesForCycles", shortName="dontIncreaseKmerSizesForCycles", doc="Should we disable the iterating over kmer sizes when graph cycles are detected?", required = false) + protected boolean dontIncreaseKmerSizesForCycles = false; + + @Advanced + @Argument(fullName="numPruningSamples", shortName="numPruningSamples", doc="The number of samples that must pass the minPruning factor in order for the path to be kept", required = false) + protected int numPruningSamples = 1; + /** * Assembly graph can be quite complex, and could imply a very large number of possible haplotypes. Each haplotype * considered requires N PairHMM evaluations if there are N reads across all samples. In order to control the @@ -328,7 +338,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In */ @Advanced @Argument(fullName="phredScaledGlobalReadMismappingRate", shortName="globalMAPQ", doc="The global assumed mismapping rate for reads", required = false) - protected int phredScaledGlobalReadMismappingRate = 60; + protected int phredScaledGlobalReadMismappingRate = 45; @Advanced @Argument(fullName="maxNumHaplotypesInPopulation", shortName="maxNumHaplotypesInPopulation", doc="Maximum number of haplotypes to consider for your population. 
This number will probably need to be increased when calling organisms with high heterozygosity.", required = false) @@ -384,6 +394,10 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="dontUseSoftClippedBases", shortName="dontUseSoftClippedBases", doc="If specified, we will not analyze soft clipped bases in the reads", required = false) protected boolean dontUseSoftClippedBases = false; + @Hidden + @Argument(fullName="captureAssemblyFailureBAM", shortName="captureAssemblyFailureBAM", doc="If specified, we will write a BAM called assemblyFailure.bam capturing all of the reads that were in the active region when the assembler failed for any reason", required = false) + protected boolean captureAssemblyFailureBAM = false; + @Hidden @Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequenece rather than just the shortest paths", required = false) protected boolean allowCyclesInKmerGraphToGeneratePaths = false; @@ -392,6 +406,20 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Argument(fullName="noFpga", shortName="noFpga", doc="If provided, disables the use of the FPGA HMM implementation", required = false) protected boolean noFpga = false; + // Parameters to control read error correction + @Hidden + @Argument(fullName="errorCorrectReads", shortName="errorCorrectReads", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. May cause fundamental problems with the assembly graph itself", required=false) + protected boolean errorCorrectReads = false; + + @Hidden + @Argument(fullName="kmerLengthForReadErrorCorrection", shortName="kmerLengthForReadErrorCorrection", doc = "Use an exploratory algorithm to error correct the kmers used during assembly. 
May cause fundamental problems with the assembly graph itself", required=false) + protected int kmerLengthForReadErrorCorrection = 25; + + @Hidden + @Argument(fullName="minObservationsForKmerToBeSolid", shortName="minObservationsForKmerToBeSolid", doc = "A k-mer must be seen at least these times for it to be considered solid", required=false) + protected int minObservationsForKmerToBeSolid = 20; + + // ----------------------------------------------------------------------------------------------- // done with Haplotype caller parameters // ----------------------------------------------------------------------------------------------- @@ -422,7 +450,8 @@ public class HaplotypeCaller extends ActiveRegionWalker, In private final static int PADDING_AROUND_OTHERS_FOR_CALLING = 150; // the maximum extent into the full active region extension that we're willing to go in genotyping our events - private final static int MAX_GENOTYPING_ACTIVE_REGION_EXTENSION = 25; + private final static int MAX_DISCOVERY_ACTIVE_REGION_EXTENSION = 25; + private final static int MAX_GGA_ACTIVE_REGION_EXTENSION = 100; private ActiveRegionTrimmer trimmer = null; @@ -432,13 +461,11 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // bases with quality less than or equal to this value are trimmed off the tails of the reads private static final byte MIN_TAIL_QUALITY = 20; + private static final byte MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION = 6; // the minimum length of a read we'd consider using for genotyping private final static int MIN_READ_LENGTH = 10; - private List samplesList = new ArrayList(); - private final static double LOG_ONE_HALF = -Math.log10(2.0); - private final static double LOG_ONE_THIRD = -Math.log10(3.0); - private final List allelesToGenotype = new ArrayList(); + private List samplesList = new ArrayList<>(); private final static Allele FAKE_REF_ALLELE = Allele.create("N", true); // used in isActive function to call into UG Engine. 
Should never appear anywhere in a VCF file private final static Allele FAKE_ALT_ALLELE = Allele.create("", false); // used in isActive function to call into UG Engine. Should never appear anywhere in a VCF file @@ -518,7 +545,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final int maxAllowedPathsForReadThreadingAssembler = Math.max(maxPathsPerSample * nSamples, MIN_PATHS_PER_GRAPH); assemblyEngine = useDebruijnAssembler ? new DeBruijnAssembler(minKmerForDebruijnAssembler, onlyUseKmerSizeForDebruijnAssembler) - : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes); + : new ReadThreadingAssembler(maxAllowedPathsForReadThreadingAssembler, kmerSizes, dontIncreaseKmerSizesForCycles, numPruningSamples); assemblyEngine.setErrorCorrectKmers(errorCorrectKmers); assemblyEngine.setPruneFactor(MIN_PRUNE_FACTOR); @@ -549,11 +576,16 @@ public class HaplotypeCaller extends ActiveRegionWalker, In genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, variantMerger ); - if ( bamWriter != null ) + if ( bamWriter != null ) { + // we currently do not support multi-threaded BAM writing, so exception out + if ( getToolkit().getTotalNumberOfThreads() > 1 ) + throw new UserException.BadArgumentValue("bamout", "Currently cannot emit a BAM file from the HaplotypeCaller in multi-threaded mode."); haplotypeBAMWriter = HaplotypeBAMWriter.create(bamWriterType, bamWriter, getToolkit().getSAMFileHeader()); + } trimmer = new ActiveRegionTrimmer(DEBUG, PADDING_AROUND_SNPS_FOR_CALLING, PADDING_AROUND_OTHERS_FOR_CALLING, - MAX_GENOTYPING_ACTIVE_REGION_EXTENSION, getToolkit().getGenomeLocParser()); + UAC.GenotypingMode.equals(GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES) ? 
MAX_GGA_ACTIVE_REGION_EXTENSION : MAX_DISCOVERY_ACTIVE_REGION_EXTENSION, + getToolkit().getGenomeLocParser()); } //--------------------------------------------------------------------------------------------------------------- @@ -592,7 +624,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { final VariantContext vcFromAllelesRod = UnifiedGenotyperEngine.getVCFromAllelesRod(tracker, ref, ref.getLocus(), false, logger, UG_engine.getUAC().alleles); if( vcFromAllelesRod != null ) { - allelesToGenotype.add(vcFromAllelesRod); // save for later for processing during the ActiveRegion's map call. Should be folded into a RefMetaDataTracker object return new ActivityProfileState(ref.getLocus(), 1.0); } } @@ -605,7 +636,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // if we don't have any data, just abort early return new ActivityProfileState(ref.getLocus(), 0.0); - final List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied + final List noCall = new ArrayList<>(); // used to noCall all genotypes until the exact model is applied noCall.add(Allele.NO_CALL); final Map splitContexts = AlignmentContextUtils.splitContextBySampleName(context); @@ -627,14 +658,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } } genotypeLikelihoods[AA] += p.getRepresentativeCount() * QualityUtils.qualToProbLog10(qual); - genotypeLikelihoods[AB] += p.getRepresentativeCount() * MathUtils.approximateLog10SumLog10( QualityUtils.qualToProbLog10(qual) + LOG_ONE_HALF, QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD + LOG_ONE_HALF ); - genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD; + genotypeLikelihoods[AB] += p.getRepresentativeCount() * MathUtils.approximateLog10SumLog10( QualityUtils.qualToProbLog10(qual) + 
MathUtils.LOG_ONE_HALF, QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG_ONE_THIRD + MathUtils.LOG_ONE_HALF ); + genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + MathUtils.LOG_ONE_THIRD; } } genotypes.add( new GenotypeBuilder(sample.getKey()).alleles(noCall).PL(genotypeLikelihoods).make() ); } - final List alleles = new ArrayList(); + final List alleles = new ArrayList<>(); alleles.add( FAKE_REF_ALLELE ); alleles.add( FAKE_ALT_ALLELE ); final VariantCallContext vcOut = UG_engine_simple_genotyper.calculateGenotypes(new VariantContextBuilder("HCisActive!", context.getContig(), context.getLocation().getStart(), context.getLocation().getStop(), alleles).genotypes(genotypes).make(), GenotypeLikelihoodsCalculationModel.Model.INDEL); @@ -660,12 +691,11 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final List activeAllelesToGenotype = new ArrayList<>(); if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { - for( final VariantContext vc : allelesToGenotype ) { - if( originalActiveRegion.getLocation().overlapsP( getToolkit().getGenomeLocParser().createGenomeLoc(vc) ) ) { + for ( final VariantContext vc : metaDataTracker.getValues(UG_engine.getUAC().alleles) ) { + if ( vc.isNotFiltered() ) { activeAllelesToGenotype.add(vc); // do something with these VCs during GGA mode } } - allelesToGenotype.removeAll( activeAllelesToGenotype ); // No alleles found in this region so nothing to do! 
if ( activeAllelesToGenotype.isEmpty() ) { return NO_CALLS; } } else { @@ -680,7 +710,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In if (dontGenotype) return NO_CALLS; // user requested we not proceed // filter out reads from genotyping which fail mapping quality based criteria - final List filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); + final Collection filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); final Map> perSampleFilteredReadList = splitReadsBySample( filteredReads ); if( assemblyResult.regionForGenotyping.size() == 0 ) { return NO_CALLS; } // no reads remain after filtering so nothing else to do! @@ -689,23 +719,27 @@ public class HaplotypeCaller extends ActiveRegionWalker, In //logger.info("Computing read likelihoods with " + assemblyResult.regionForGenotyping.size() + " reads"); final Map stratifiedReadMap = likelihoodCalculationEngine.computeReadLikelihoods( assemblyResult.haplotypes, splitReadsBySample( assemblyResult.regionForGenotyping.getReads() ) ); - // subset down to only the best haplotypes to be genotyped in all samples ( in GGA mode use all discovered haplotypes ) - final List bestHaplotypes = selectBestHaplotypesForGenotyping(assemblyResult.haplotypes, stratifiedReadMap); + // Note: we used to subset down at this point to only the "best" haplotypes in all samples for genotyping, but there + // was a bad interaction between that selection and the marginalization that happens over each event when computing + // GLs. In particular, for samples that are heterozygous non-reference (B/C) the marginalization for B treats the + // haplotype containing C as reference (and vice versa). Now this is fine if all possible haplotypes are included + // in the genotyping, but we lose information if we select down to a few haplotypes. 
[EB] final GenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine.assignGenotypeLikelihoods( UG_engine, - bestHaplotypes, + assemblyResult.haplotypes, stratifiedReadMap, perSampleFilteredReadList, assemblyResult.fullReferenceWithPadding, assemblyResult.paddedReferenceLoc, assemblyResult.regionForGenotyping.getLocation(), getToolkit().getGenomeLocParser(), + metaDataTracker, activeAllelesToGenotype ); // TODO -- must disable if we are doing NCT, or set the output type of ! presorted if ( bamWriter != null ) { haplotypeBAMWriter.writeReadsAlignedToHaplotypes(assemblyResult.haplotypes, assemblyResult.paddedReferenceLoc, - bestHaplotypes, + assemblyResult.haplotypes, calledHaplotypes.getCalledHaplotypes(), stratifiedReadMap); } @@ -748,25 +782,57 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // Create the reference haplotype which is the bases from the reference that make up the active region finalizeActiveRegion(activeRegion); // merge overlapping fragments, clip adapter and low qual tails - final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); final byte[] fullReferenceWithPadding = activeRegion.getActiveRegionReference(referenceReader, REFERENCE_PADDING); final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); + final Haplotype referenceHaplotype = createReferenceHaplotype(activeRegion, paddedReferenceLoc); - final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); + // Create ReadErrorCorrector object if requested - will be used within assembly engine. + ReadErrorCorrector readErrorCorrector = null; + if (errorCorrectReads) + readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection, MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION, minObservationsForKmerToBeSolid, DEBUG,fullReferenceWithPadding); - if ( ! 
dontTrimActiveRegions ) { - return trimActiveRegion(activeRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); - } else { - // we don't want to trim active regions, so go ahead and use the old one - return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc, true); + try { + final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype,readErrorCorrector ); + if ( ! dontTrimActiveRegions ) { + return trimActiveRegion(activeRegion, haplotypes, activeAllelesToGenotype, fullReferenceWithPadding, paddedReferenceLoc); + } else { + // we don't want to trim active regions, so go ahead and use the old one + return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc, true); + } + } catch ( Exception e ) { + // Capture any exception that might be thrown, and write out the assembly failure BAM if requested + if ( captureAssemblyFailureBAM ) { + final SAMFileWriter writer = ReadUtils.createSAMFileWriterWithCompression(getToolkit().getSAMFileHeader(), true, "assemblyFailure.bam", 5); + for ( final GATKSAMRecord read : activeRegion.getReads() ) { + writer.addAlignment(read); + } + writer.close(); + } + throw e; } } + /** + * Helper function to create the reference haplotype out of the active region and a padded loc + * @param activeRegion the active region from which to generate the reference haplotype + * @param paddedReferenceLoc the GenomeLoc which includes padding and shows how big the reference haplotype should be + * @return a non-null haplotype + */ + private Haplotype createReferenceHaplotype(final ActiveRegion activeRegion, final GenomeLoc paddedReferenceLoc) { + final Haplotype refHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); + refHaplotype.setAlignmentStartHapwrtRef(activeRegion.getExtendedLoc().getStart() - paddedReferenceLoc.getStart()); 
+ final Cigar c = new Cigar(); + c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); + refHaplotype.setCigar(c); + return refHaplotype; + } + /** * Trim down the active region to just enough to properly genotype the events among the haplotypes * * @param originalActiveRegion our full active region * @param haplotypes the list of haplotypes we've created from assembly + * @param activeAllelesToGenotype additional alleles we might need to genotype (can be empty) * @param fullReferenceWithPadding the reference bases over the full padded location * @param paddedReferenceLoc the span of the reference bases * @return an AssemblyResult containing the trimmed active region with all of the reads we should use @@ -775,12 +841,14 @@ public class HaplotypeCaller extends ActiveRegionWalker, In */ private AssemblyResult trimActiveRegion(final ActiveRegion originalActiveRegion, final List haplotypes, + final List activeAllelesToGenotype, final byte[] fullReferenceWithPadding, final GenomeLoc paddedReferenceLoc) { if ( DEBUG ) logger.info("Trimming active region " + originalActiveRegion + " with " + haplotypes.size() + " haplotypes"); EventMap.buildEventMapsForHaplotypes(haplotypes, fullReferenceWithPadding, paddedReferenceLoc, DEBUG); final TreeSet allVariantsWithinFullActiveRegion = EventMap.getAllVariantContexts(haplotypes); + allVariantsWithinFullActiveRegion.addAll(activeAllelesToGenotype); final ActiveRegion trimmedActiveRegion = trimmer.trimRegion(originalActiveRegion, allVariantsWithinFullActiveRegion); if ( trimmedActiveRegion == null ) { @@ -790,7 +858,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } // trim down the haplotypes - final Set haplotypeSet = new HashSet(haplotypes.size()); + final Set haplotypeSet = new HashSet<>(haplotypes.size()); for ( final Haplotype h : haplotypes ) { final Haplotype trimmed = h.trim(trimmedActiveRegion.getExtendedLoc()); if ( trimmed != null ) { @@ -801,7 +869,7 @@ public class HaplotypeCaller 
extends ActiveRegionWalker, In } // create the final list of trimmed haplotypes - final List trimmedHaplotypes = new ArrayList(haplotypeSet); + final List trimmedHaplotypes = new ArrayList<>(haplotypeSet); // sort haplotypes to take full advantage of haplotype start offset optimizations in PairHMM Collections.sort( trimmedHaplotypes, new HaplotypeBaseComparator() ); @@ -815,7 +883,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // trim down the reads and add them to the trimmed active region - final List trimmedReads = new ArrayList(originalActiveRegion.getReads().size()); + final List trimmedReads = new ArrayList<>(originalActiveRegion.getReads().size()); for( final GATKSAMRecord read : originalActiveRegion.getReads() ) { final GATKSAMRecord clippedRead = ReadClipper.hardClipToRegion( read, trimmedActiveRegion.getExtendedLoc().getStart(), trimmedActiveRegion.getExtendedLoc().getStop() ); if( trimmedActiveRegion.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 ) { @@ -828,21 +896,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In return new AssemblyResult(trimmedHaplotypes, trimmedActiveRegion, fullReferenceWithPadding, paddedReferenceLoc, true); } - /** - * Select the best N haplotypes according to their likelihoods, if appropriate - * - * @param haplotypes a list of haplotypes to consider - * @param stratifiedReadMap a map from samples -> read likelihoods - * @return the list of haplotypes to genotype - */ - protected List selectBestHaplotypesForGenotyping(final List haplotypes, final Map stratifiedReadMap) { - if ( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { - return haplotypes; - } else { - return likelihoodCalculationEngine.selectBestHaplotypesFromEachSample(haplotypes, stratifiedReadMap, maxNumHaplotypesInPopulation); - } - } - //--------------------------------------------------------------------------------------------------------------- 
// // reduce @@ -857,8 +910,6 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Override public Integer reduce(List callsInRegion, Integer numCalledRegions) { for( final VariantContext call : callsInRegion ) { - // TODO -- uncomment this line once ART-based walkers have a proper RefMetaDataTracker. - // annotationEngine.annotateDBs(metaDataTracker, getToolkit().getGenomeLocParser().createGenomeLoc(call), call); vcfWriter.add( call ); } return (callsInRegion.isEmpty() ? 0 : 1) + numCalledRegions; @@ -866,6 +917,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In @Override public void onTraversalDone(Integer result) { + likelihoodCalculationEngine.close(); logger.info("Ran local assembly on " + result + " active regions"); } @@ -877,22 +929,19 @@ public class HaplotypeCaller extends ActiveRegionWalker, In private void finalizeActiveRegion( final ActiveRegion activeRegion ) { if( DEBUG ) { logger.info("Assembling " + activeRegion.getLocation() + " with " + activeRegion.size() + " reads: (with overlap region = " + activeRegion.getExtendedLoc() + ")"); } - final List finalizedReadList = new ArrayList<>(); - final FragmentCollection fragmentCollection = FragmentUtils.create( activeRegion.getReads() ); - activeRegion.clearReads(); - - // Join overlapping paired reads to create a single longer read - finalizedReadList.addAll( fragmentCollection.getSingletonReads() ); - for( final List overlappingPair : fragmentCollection.getOverlappingPairs() ) { - finalizedReadList.addAll( FragmentUtils.mergeOverlappingPairedFragments(overlappingPair) ); - } // Loop through the reads hard clipping the adaptor and low quality tails - final List readsToUse = new ArrayList<>(finalizedReadList.size()); - for( final GATKSAMRecord myRead : finalizedReadList ) { + final List readsToUse = new ArrayList<>(activeRegion.getReads().size()); + for( final GATKSAMRecord myRead : activeRegion.getReads() ) { final GATKSAMRecord postAdapterRead = ( myRead.getReadUnmappedFlag() ? 
myRead : ReadClipper.hardClipAdaptorSequence( myRead ) ); if( postAdapterRead != null && !postAdapterRead.isEmpty() && postAdapterRead.getCigar().getReadLength() > 0 ) { - GATKSAMRecord clippedRead = useLowQualityBasesForAssembly ? postAdapterRead : ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY ); + GATKSAMRecord clippedRead; + if (errorCorrectReads) + clippedRead = ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY_WITH_ERROR_CORRECTION ); + else if (useLowQualityBasesForAssembly) + clippedRead = postAdapterRead; + else // default case: clip low qual ends of reads + clippedRead= ReadClipper.hardClipLowQualEnds( postAdapterRead, MIN_TAIL_QUALITY ); if ( dontUseSoftClippedBases ) { // uncomment to remove hard clips from consideration at all @@ -914,20 +963,18 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } } + activeRegion.clearReads(); activeRegion.addAll(DownsamplingUtils.levelCoverageByPosition(ReadUtils.sortReadsByCoordinate(readsToUse), maxReadsInRegionPerSample, minReadsPerAlignmentStart)); } - private List filterNonPassingReads( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion ) { - final List readsToRemove = new ArrayList<>(); -// logger.info("Filtering non-passing regions: n incoming " + activeRegion.getReads().size()); + private Set filterNonPassingReads( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion ) { + final Set readsToRemove = new LinkedHashSet<>(); for( final GATKSAMRecord rec : activeRegion.getReads() ) { if( rec.getReadLength() < MIN_READ_LENGTH || rec.getMappingQuality() < 20 || BadMateFilter.hasBadMate(rec) || (keepRG != null && !rec.getReadGroup().getId().equals(keepRG)) ) { readsToRemove.add(rec); -// logger.info("\tremoving read " + rec + " len " + rec.getReadLength()); } } activeRegion.removeAll( readsToRemove ); -// logger.info("Filtered non-passing regions: n remaining " + activeRegion.getReads().size()); return readsToRemove; } @@ 
-937,8 +984,8 @@ public class HaplotypeCaller extends ActiveRegionWalker, In return getToolkit().getGenomeLocParser().createGenomeLoc(activeRegion.getExtendedLoc().getContig(), padLeft, padRight); } - private Map> splitReadsBySample( final List reads ) { - final Map> returnMap = new HashMap>(); + private Map> splitReadsBySample( final Collection reads ) { + final Map> returnMap = new HashMap<>(); for( final String sample : samplesList) { List readList = returnMap.get( sample ); if( readList == null ) { @@ -954,4 +1001,4 @@ public class HaplotypeCaller extends ActiveRegionWalker, In } -} \ No newline at end of file +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java index 745d4de06..2e757722b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/Kmer.java @@ -46,7 +46,11 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; +import com.google.java.contract.Requires; + import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; /** * Fast wrapper for byte[] kmers @@ -149,6 +153,15 @@ public class Kmer { return bases; } + /** + * Backdoor method for fast base peeking: avoids copying like bases() and doesn't modify internal state. + * Intended to be used for fast computation of neighboring kmers + * @return Reference to complete bases stores in this kmer + * WARNING: UNSAFE, caller should NEVER modify bases. Speed/safety tradeoff!! + */ + private byte[] unsafePeekAtBases() { + return bases; + } /** * Get a string representation of the bases of this kmer * @return a non-null string @@ -165,6 +178,45 @@ public class Kmer { return length; } + /** + * Gets a set of differing positions and bases from another k-mer, limiting up to a max distance. 
+ * For example, if this = "ACATT" and other = "ACGGT": + * - if maxDistance < 2 then -1 will be returned, since distance between kmers is 2. + * - If maxDistance >=2, then 2 will be returned, and arrays will be filled as follows: + * differingIndeces = {2,3} + * differingBases = {'G','G'} + * @param other Other k-mer to test + * @param maxDistance Maximum distance to search. If this and other k-mers are beyond this Hamming distance, + * search is aborted and a null is returned + * @param differingIndeces Array with indices of differing bytes in array + * @param differingBases Actual differing bases + * @return Set of mappings of form (int->byte), where each elements represents index + * of k-mer array where bases mismatch, and the byte is the base from other kmer. + * If both k-mers differ by more than maxDistance, returns null + */ + @Requires({"other != null","differingIndeces != null","differingBases != null", + "differingIndeces.size>=maxDistance","differingBases.size>=maxDistance"}) + public int getDifferingPositions(final Kmer other, + final int maxDistance, + final int[] differingIndeces, + final byte[] differingBases) { + + + int dist = 0; + if (length == other.length()) { + final byte[] f2 = other.unsafePeekAtBases(); + for (int i=0; i < length; i++) + if(bases[start+i] != f2[i]) { + differingIndeces[dist] = i; + differingBases[dist++] = f2[i]; + if (dist > maxDistance) + return -1; + } + + } + return dist; + } + @Override public String toString() { return "Kmer{" + new String(bases()) + "}"; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index 3cdad37ea..e6fa64e52 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -48,29 +48,38 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; +import net.sf.samtools.SAMUtils; import org.apache.log4j.Logger; -import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.haplotype.HaplotypeScoreComparator; -import org.broadinstitute.sting.utils.pairhmm.*; +import org.broadinstitute.sting.utils.pairhmm.Log10PairHMM; +import org.broadinstitute.sting.utils.pairhmm.LoglessPairHMM; +import org.broadinstitute.sting.utils.pairhmm.CnyPairHMM; +import org.broadinstitute.sting.utils.pairhmm.BatchPairHMM; +import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.variantcontext.Allele; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.PrintStream; import java.util.*; public class LikelihoodCalculationEngine { private final static Logger logger = Logger.getLogger(LikelihoodCalculationEngine.class); - private static final double LOG_ONE_HALF = -Math.log10(2.0); private final byte constantGCP; private final double 
log10globalReadMismappingRate; private final boolean DEBUG; + private final PairHMM.HMM_IMPLEMENTATION hmmType; private final boolean noFpga; @@ -91,6 +100,10 @@ public class LikelihoodCalculationEngine { } }; + private final static boolean WRITE_LIKELIHOODS_TO_FILE = false; + private final static String LIKELIHOODS_FILENAME = "likelihoods.txt"; + private final PrintStream likelihoodsStream; + /** * The expected rate of random sequencing errors for a read originating from its true haplotype. * @@ -120,6 +133,16 @@ public class LikelihoodCalculationEngine { this.DEBUG = debug; this.log10globalReadMismappingRate = log10globalReadMismappingRate; this.noFpga = noFpga; + + if ( WRITE_LIKELIHOODS_TO_FILE ) { + try { + likelihoodsStream = new PrintStream(new FileOutputStream(new File(LIKELIHOODS_FILENAME))); + } catch ( FileNotFoundException e ) { + throw new RuntimeException(e); + } + } else { + likelihoodsStream = null; + } } public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType, final double log10globalReadMismappingRate ) { @@ -130,6 +153,12 @@ public class LikelihoodCalculationEngine { this((byte)10, false, PairHMM.HMM_IMPLEMENTATION.LOGLESS_CACHING, -3, false); } + public void close() { + if ( likelihoodsStream != null ) likelihoodsStream.close(); + } + + + /** * Initialize our pairHMM with parameters appropriate to the haplotypes and reads we're going to evaluate * @@ -224,6 +253,17 @@ public class LikelihoodCalculationEngine { final double log10l = pairHMM.get().computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(), read.getReadBases(), readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype); + if ( WRITE_LIKELIHOODS_TO_FILE ) { + likelihoodsStream.printf("%s %s %s %s %s %s %f%n", + haplotype.getBaseString(), + new String(read.getReadBases()), + SAMUtils.phredToFastq(readQuals), + SAMUtils.phredToFastq(readInsQuals), + SAMUtils.phredToFastq(readDelQuals), + 
SAMUtils.phredToFastq(overallGCP), + log10l); + } + if ( haplotype.isNonReference() ) bestNonReflog10L = Math.max(bestNonReflog10L, log10l); else @@ -300,7 +340,7 @@ public class LikelihoodCalculationEngine { // Compute log10(10^x1/2 + 10^x2/2) = log10(10^x1+10^x2)-log10(2) // First term is approximated by Jacobian log with table lookup. haplotypeLikelihood += ReadUtils.getMeanRepresentativeReadCount( entry.getKey() ) * - ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + LOG_ONE_HALF ); + ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + MathUtils.LOG_ONE_HALF ); } } haplotypeLikelihoodMatrix[iii][jjj] = haplotypeLikelihood; @@ -398,11 +438,11 @@ public class LikelihoodCalculationEngine { if ( haplotypes.size() == 2 ) return haplotypes; // fast path -- we'll always want to use 2 haplotypes // all of the haplotypes that at least one sample called as one of the most likely - final Set selectedHaplotypes = new HashSet(); + final Set selectedHaplotypes = new HashSet<>(); selectedHaplotypes.add(findReferenceHaplotype(haplotypes)); // ref is always one of the selected // our annoying map from allele -> haplotype - final Map allele2Haplotype = new HashMap(); + final Map allele2Haplotype = new HashMap<>(); for ( final Haplotype h : haplotypes ) { h.setScore(h.isReference() ? 
Double.MAX_VALUE : 0.0); // set all of the scores to 0 (lowest value) for all non-ref haplotypes allele2Haplotype.put(Allele.create(h, h.isReference()), h); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index 20b005b40..c889d7995 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -78,6 +78,10 @@ import java.util.*; public abstract class LocalAssemblyEngine { private final static Logger logger = Logger.getLogger(LocalAssemblyEngine.class); + /** + * If false, we will only write out a region around the reference source + */ + private final static boolean PRINT_FULL_GRAPH_FOR_DEBUGGING = true; public static final byte DEFAULT_MIN_BASE_QUALITY_TO_USE = (byte) 8; private static final int MIN_HAPLOTYPE_REFERENCE_LENGTH = 30; @@ -111,7 +115,11 @@ public abstract class LocalAssemblyEngine { * @param refHaplotype the reference haplotype * @return a non-null list of reads */ - protected abstract List assemble(List reads, Haplotype refHaplotype); + protected abstract List assemble(List reads, Haplotype refHaplotype, List activeAlleleHaplotypes); + + protected List assemble(List reads, Haplotype refHaplotype) { + return assemble(reads, refHaplotype, Collections.emptyList()); + } /** * Main entry point into the assembly engine. 
Build a set of deBruijn graphs out of the provided reference sequence and list of reads @@ -120,16 +128,37 @@ public abstract class LocalAssemblyEngine { * @param fullReferenceWithPadding byte array holding the reference sequence with padding * @param refLoc GenomeLoc object corresponding to the reference sequence with padding * @param activeAllelesToGenotype the alleles to inject into the haplotypes during GGA mode + * @param readErrorCorrector a ReadErrorCorrector object, if read are to be corrected before assembly. Can be null if no error corrector is to be used. * @return a non-empty list of all the haplotypes that are produced during assembly */ - public List runLocalAssembly(ActiveRegion activeRegion, Haplotype refHaplotype, byte[] fullReferenceWithPadding, GenomeLoc refLoc, List activeAllelesToGenotype) { + public List runLocalAssembly(final ActiveRegion activeRegion, + final Haplotype refHaplotype, + final byte[] fullReferenceWithPadding, + final GenomeLoc refLoc, + final List activeAllelesToGenotype, + final ReadErrorCorrector readErrorCorrector) { if( activeRegion == null ) { throw new IllegalArgumentException("Assembly engine cannot be used with a null ActiveRegion."); } if( refHaplotype == null ) { throw new IllegalArgumentException("Reference haplotype cannot be null."); } if( fullReferenceWithPadding.length != refLoc.size() ) { throw new IllegalArgumentException("Reference bases and reference loc must be the same size."); } if( pruneFactor < 0 ) { throw new IllegalArgumentException("Pruning factor cannot be negative"); } + // create the list of artificial haplotypes that should be added to the graph for GGA mode + final List activeAlleleHaplotypes = createActiveAlleleHaplotypes(refHaplotype, activeAllelesToGenotype, activeRegion.getExtendedLoc()); + + + // error-correct reads before clipping low-quality tails: some low quality bases might be good and we want to recover them + final List correctedReads; + if (readErrorCorrector != null) { + // now 
correct all reads in active region after filtering/downsampling + // Note that original reads in active region are NOT modified by default, since they will be used later for GL computation, + // and we only want the read-error corrected reads for graph building. + readErrorCorrector.addReadsToKmers(activeRegion.getReads()); + correctedReads = new ArrayList<>(readErrorCorrector.correctReads(activeRegion.getReads())); + } + else correctedReads = activeRegion.getReads(); + // create the graphs by calling our subclass assemble method - final List graphs = assemble(activeRegion.getReads(), refHaplotype); + final List graphs = assemble(correctedReads, refHaplotype, activeAlleleHaplotypes); // do some QC on the graphs for ( final SeqGraph graph : graphs ) { sanityCheckGraph(graph, refHaplotype); } @@ -138,45 +167,53 @@ public abstract class LocalAssemblyEngine { if ( graphWriter != null ) { printGraphs(graphs); } // find the best paths in the graphs and return them as haplotypes - return findBestPaths( graphs, refHaplotype, fullReferenceWithPadding, refLoc, activeAllelesToGenotype, activeRegion.getExtendedLoc() ); + return findBestPaths( graphs, refHaplotype, refLoc, activeRegion.getExtendedLoc() ); } - @Requires({"refWithPadding.length > refHaplotype.getBases().length", "refLoc.containsP(activeRegionWindow)"}) - @Ensures({"result.contains(refHaplotype)"}) - protected List findBestPaths(final List graphs, final Haplotype refHaplotype, final byte[] refWithPadding, final GenomeLoc refLoc, final List activeAllelesToGenotype, final GenomeLoc activeRegionWindow) { - // add the reference haplotype separately from all the others to ensure that it is present in the list of haplotypes - final Set returnHaplotypes = new LinkedHashSet(); - refHaplotype.setAlignmentStartHapwrtRef(activeRegionWindow.getStart() - refLoc.getStart()); - final Cigar c = new Cigar(); - c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); - refHaplotype.setCigar(c); - 
returnHaplotypes.add( refHaplotype ); - + /** + * Create the list of artificial GGA-mode haplotypes by injecting each of the provided alternate alleles into the reference haplotype + * @param refHaplotype the reference haplotype + * @param activeAllelesToGenotype the list of alternate alleles in VariantContexts + * @param activeRegionWindow the window containing the reference haplotype + * @return a non-null list of haplotypes + */ + private List createActiveAlleleHaplotypes(final Haplotype refHaplotype, final List activeAllelesToGenotype, final GenomeLoc activeRegionWindow) { + final Set returnHaplotypes = new LinkedHashSet<>(); final int activeRegionStart = refHaplotype.getAlignmentStartHapwrtRef(); - final int activeRegionStop = refHaplotype.getAlignmentStartHapwrtRef() + refHaplotype.getCigar().getReferenceLength(); - // for GGA mode, add the desired allele into the haplotype for( final VariantContext compVC : activeAllelesToGenotype ) { for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { final Haplotype insertedRefHaplotype = refHaplotype.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()); - addHaplotypeForGGA( insertedRefHaplotype, refWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, true ); + if( insertedRefHaplotype != null ) { // can be null if the requested allele can't be inserted into the haplotype + returnHaplotypes.add(insertedRefHaplotype); + } } } + return new ArrayList<>(returnHaplotypes); + } + + @Ensures({"result.contains(refHaplotype)"}) + protected List findBestPaths(final List graphs, final Haplotype refHaplotype, final GenomeLoc refLoc, final GenomeLoc activeRegionWindow) { + // add the reference haplotype separately from all the others to ensure that it is present in the list of haplotypes + final Set returnHaplotypes = new LinkedHashSet<>(); + returnHaplotypes.add( refHaplotype ); + + final int activeRegionStart = 
refHaplotype.getAlignmentStartHapwrtRef(); + for( final SeqGraph graph : graphs ) { final SeqVertex source = graph.getReferenceSourceVertex(); final SeqVertex sink = graph.getReferenceSinkVertex(); if ( source == null || sink == null ) throw new IllegalArgumentException("Both source and sink cannot be null but got " + source + " and sink " + sink + " for graph "+ graph); - final KBestPaths pathFinder = new KBestPaths(allowCyclesInKmerGraphToGeneratePaths); + final KBestPaths pathFinder = new KBestPaths<>(allowCyclesInKmerGraphToGeneratePaths); for ( final Path path : pathFinder.getKBestPaths(graph, numBestHaplotypesPerGraph, source, sink) ) { -// logger.info("Found path " + path); Haplotype h = new Haplotype( path.getBases() ); if( !returnHaplotypes.contains(h) ) { final Cigar cigar = path.calculateCigar(refHaplotype.getBases()); if ( cigar == null ) { - // couldn't produce a meaningful alignment of haplotype to reference, fail quitely + // couldn't produce a meaningful alignment of haplotype to reference, fail quietly continue; } else if( cigar.isEmpty() ) { throw new IllegalStateException("Smith-Waterman alignment failure. 
Cigar = " + cigar + " with reference length " + cigar.getReferenceLength() + @@ -196,26 +233,7 @@ public abstract class LocalAssemblyEngine { returnHaplotypes.add(h); if ( debug ) - logger.info("Adding haplotype " + h.getCigar() + " from debruijn graph with kmer " + graph.getKmerSize()); - - // for GGA mode, add the desired allele into the haplotype if it isn't already present - if( !activeAllelesToGenotype.isEmpty() ) { - final Map eventMap = GenotypingEngine.generateVCsFromAlignment( h, refWithPadding, refLoc, "HCassembly" ); // BUGBUG: need to put this function in a shared place - for( final VariantContext compVC : activeAllelesToGenotype ) { // for GGA mode, add the desired allele into the haplotype if it isn't already present - final VariantContext vcOnHaplotype = eventMap.get(compVC.getStart()); - - // This if statement used to additionally have: - // "|| !vcOnHaplotype.hasSameAllelesAs(compVC)" - // but that can lead to problems downstream when e.g. you are injecting a 1bp deletion onto - // a haplotype that already contains a 1bp insertion (so practically it is reference but - // falls into the bin for the 1bp deletion because we keep track of the artificial alleles). 
- if( vcOnHaplotype == null ) { - for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - addHaplotypeForGGA( h.insertAllele(compVC.getReference(), compAltAllele, activeRegionStart + compVC.getStart() - activeRegionWindow.getStart(), compVC.getStart()), refWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, false ); - } - } - } - } + logger.info("Adding haplotype " + h.getCigar() + " from graph with kmer " + graph.getKmerSize()); } } } @@ -238,7 +256,7 @@ public abstract class LocalAssemblyEngine { } } - return new ArrayList(returnHaplotypes); + return new ArrayList<>(returnHaplotypes); } /** @@ -257,84 +275,25 @@ public abstract class LocalAssemblyEngine { } /** - * Take a haplotype which was generated by injecting an allele into a string of bases and run SW against the reference to determine the variants on the haplotype. - * Unfortunately since this haplotype didn't come from the assembly graph you can't straightforwardly use the bubble traversal algorithm to get this information. - * This is a target for future work as we rewrite the HaplotypeCaller to be more bubble-caller based. 
- * @param haplotype the candidate haplotype - * @param ref the reference bases to align against - * @param haplotypeList the current list of haplotypes - * @param activeRegionStart the start of the active region in the reference byte array - * @param activeRegionStop the stop of the active region in the reference byte array - * @param FORCE_INCLUSION_FOR_GGA_MODE if true will include in the list even if it already exists - * @return true if the candidate haplotype was successfully incorporated into the haplotype list + * Print graph to file if debugGraphTransformations is enabled + * @param graph the graph to print + * @param file the destination file */ - @Requires({"ref != null", "ref.length >= activeRegionStop - activeRegionStart"}) - private boolean addHaplotypeForGGA( final Haplotype haplotype, final byte[] ref, final Set haplotypeList, final int activeRegionStart, final int activeRegionStop, final boolean FORCE_INCLUSION_FOR_GGA_MODE ) { - if( haplotype == null ) { return false; } - - final SWPairwiseAlignment swConsensus = new SWPairwiseAlignment( ref, haplotype.getBases(), SWParameterSet.STANDARD_NGS ); - haplotype.setAlignmentStartHapwrtRef( swConsensus.getAlignmentStart2wrt1() ); - - if( swConsensus.getCigar().toString().contains("S") || swConsensus.getCigar().getReferenceLength() < 60 || swConsensus.getAlignmentStart2wrt1() < 0 ) { // protect against unhelpful haplotype alignments - return false; - } - - haplotype.setCigar( AlignmentUtils.leftAlignIndel(swConsensus.getCigar(), ref, haplotype.getBases(), swConsensus.getAlignmentStart2wrt1(), 0, true) ); - - final int hapStart = ReadUtils.getReadCoordinateForReferenceCoordinate(haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar(), activeRegionStart, ReadUtils.ClippingTail.LEFT_TAIL, true); - int hapStop = ReadUtils.getReadCoordinateForReferenceCoordinate( haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar(), activeRegionStop, ReadUtils.ClippingTail.RIGHT_TAIL, true ); - if( hapStop == 
ReadUtils.CLIPPING_GOAL_NOT_REACHED && activeRegionStop == haplotype.getAlignmentStartHapwrtRef() + haplotype.getCigar().getReferenceLength() ) { - hapStop = activeRegionStop; // contract for getReadCoordinateForReferenceCoordinate function says that if read ends at boundary then it is outside of the clipping goal - } - byte[] newHaplotypeBases; - // extend partial haplotypes to contain the full active region sequence - if( hapStart == ReadUtils.CLIPPING_GOAL_NOT_REACHED && hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll(ArrayUtils.addAll(ArrayUtils.subarray(ref, activeRegionStart, swConsensus.getAlignmentStart2wrt1()), - haplotype.getBases()), - ArrayUtils.subarray(ref, swConsensus.getAlignmentStart2wrt1() + swConsensus.getCigar().getReferenceLength(), activeRegionStop)); - } else if( hapStart == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.subarray(ref, activeRegionStart, swConsensus.getAlignmentStart2wrt1()), ArrayUtils.subarray(haplotype.getBases(), 0, hapStop) ); - } else if( hapStop == ReadUtils.CLIPPING_GOAL_NOT_REACHED ) { - newHaplotypeBases = ArrayUtils.addAll( ArrayUtils.subarray(haplotype.getBases(), hapStart, haplotype.getBases().length), ArrayUtils.subarray(ref, swConsensus.getAlignmentStart2wrt1() + swConsensus.getCigar().getReferenceLength(), activeRegionStop) ); - } else { - newHaplotypeBases = ArrayUtils.subarray(haplotype.getBases(), hapStart, hapStop); - } - - final Haplotype h = new Haplotype( newHaplotypeBases ); - final SWPairwiseAlignment swConsensus2 = new SWPairwiseAlignment( ref, h.getBases(), SWParameterSet.STANDARD_NGS ); - - h.setAlignmentStartHapwrtRef( swConsensus2.getAlignmentStart2wrt1() ); - if ( haplotype.isArtificialHaplotype() ) { - h.setArtificialEvent(haplotype.getArtificialEvent()); - } - if( swConsensus2.getCigar().toString().contains("S") || swConsensus2.getCigar().getReferenceLength() != activeRegionStop - activeRegionStart || 
swConsensus2.getAlignmentStart2wrt1() < 0 ) { // protect against unhelpful haplotype alignments - return false; - } - - h.setCigar( AlignmentUtils.leftAlignIndel(swConsensus2.getCigar(), ref, h.getBases(), swConsensus2.getAlignmentStart2wrt1(), 0, true) ); - - if( FORCE_INCLUSION_FOR_GGA_MODE || !haplotypeList.contains(h) ) { - haplotypeList.add(h); - return true; - } else { - return false; + protected void printDebugGraphTransform(final BaseGraph graph, final File file) { + if ( debugGraphTransformations ) { + if ( PRINT_FULL_GRAPH_FOR_DEBUGGING ) + graph.printGraph(file, pruneFactor); + else + graph.subsetToRefSource().printGraph(file, pruneFactor); } } protected SeqGraph cleanupSeqGraph(final SeqGraph seqGraph) { - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.1.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.1.dot")); - // TODO -- we need to come up with a consistent pruning algorithm. The current pruning algorithm - // TODO -- works well but it doesn't differentiate between an isolated chain that doesn't connect - // TODO -- to anything from one that's actually has good support along the chain but just happens - // TODO -- to have a connection in the middle that has weight of < pruneFactor. 
Ultimately - // TODO -- the pruning algorithm really should be an error correction algorithm that knows more - // TODO -- about the structure of the data and can differentiate between an infrequent path but - // TODO -- without evidence against it (such as occurs when a region is hard to get any reads through) - // TODO -- from a error with lots of weight going along another similar path // the very first thing we need to do is zip up the graph, or pruneGraph will be too aggressive seqGraph.zipLinearChains(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.2.zipped.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.2.zipped.dot")); // now go through and prune the graph, removing vertices no longer connected to the reference chain // IMPORTANT: pruning must occur before we call simplifyGraph, as simplifyGraph adds 0 weight @@ -342,9 +301,9 @@ public abstract class LocalAssemblyEngine { seqGraph.pruneGraph(pruneFactor); seqGraph.removeVerticesNotConnectedToRefRegardlessOfEdgeDirection(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.3.pruned.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.3.pruned.dot")); seqGraph.simplifyGraph(); - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.4.merged.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.4.merged.dot")); // The graph has degenerated in some way, so the reference source and/or sink cannot be id'd. 
Can // happen in cases where for example the reference somehow manages to acquire a cycle, or @@ -363,7 +322,7 @@ public abstract class LocalAssemblyEngine { seqGraph.addVertex(dummy); seqGraph.addEdge(complete, dummy, new BaseEdge(true, 0)); } - if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.5.final.dot"), pruneFactor); + printDebugGraphTransform(seqGraph, new File("sequenceGraph.5.final.dot")); return seqGraph; } @@ -372,7 +331,6 @@ public abstract class LocalAssemblyEngine { * Perform general QC on the graph to make sure something hasn't gone wrong during assembly * @param graph the graph to check * @param refHaplotype the reference haplotype - * @param */ private void sanityCheckGraph(final BaseGraph graph, final Haplotype refHaplotype) { sanityCheckReferenceGraph(graph, refHaplotype); @@ -383,7 +341,6 @@ public abstract class LocalAssemblyEngine { * * @param graph the graph to check * @param refHaplotype the reference haplotype - * @param */ private void sanityCheckReferenceGraph(final BaseGraph graph, final Haplotype refHaplotype) { if( graph.getReferenceSourceVertex() == null ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java new file mode 100644 index 000000000..e1471ab33 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrector.java @@ -0,0 +1,526 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import com.google.java.contract.Requires; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.QualityUtils; +import org.broadinstitute.sting.utils.clipping.ReadClipper; +import org.broadinstitute.sting.utils.collections.Pair; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + +import java.util.*; + +/** + * Utility class that error-corrects reads. + * Main idea: An error in a read will appear as a bubble in a k-mer (de Bruijn) graph and such bubble will have very low multiplicity. + * Hence, read errors will appear as "sparse" kmers with very little support. + * Historically, the most common approach to error-correct reads before assembly has been to first compute the kmer spectrum of the reads, + * defined as the kmer composition of a set of reads along with the multiplicity of each kmer. + * First-generation correctors like the Euler corrector (Pevzner 2001) mapped low frequency kmers (kmers appearing say below N times) + * into high frequency ones that lied within a certain Hamming or edit distance. 
+ * This is doable, but has some drawbacks: + * - Kmers used for error correction become tied to kmers used for graph building. + * - Hence, large kmers (desirable for graph building because they can resolve repeats better) are a hindrance for error correction, + * because they are seen less often. + * - After error correction, there is no guarantee that a sequence of kmers corresponds to an "actual" read. + * + * An error-corrected set of reads also makes a much smoother graph without the need to resolve so many bubbles. + * + * Idea hence is to correct reads based on their kmer content, but in a context independent from graph building. + * In order to do this, the following steps are taken: + * - The k-mer spectrum of a set of reads is computed. However, we are at freedom to choose the most convenient k-mer size (typically around + * read length /2). + * - We partition the set of observed k-mers into "solid" kmers which have multiplicity > M, and "unsolid" ones otherwise (Pevzner 2001). + * + * - Main idea of the algorithm is to try to substitute a sequence of bases in a read by a sequence better supported by kmers. + * - For each "unsolid" kmer observed in reads, we try to find a "solid" kmer within a maximum Hamming distance. + * - If such solid kmer exists, then this unsolid kmer is "correctable", otherwise, uncorrectable. + * - For each read, then: + * -- Walk through read and visit all kmers. + * -- If kmer is solid, continue to next kmer. + * -- If not, and if it's correctable (i.e. there exists a mapping from an unsolid kmer to a solid kmer within a given Hamming distance), + * add the bases and offsets corresponding to differing positions between unsolid and solid kmer to correction list. + * -- At the end, each base in read will have a list of corrections associated with it. We can then choose to correct or not. + * If read has only consistent corrections, then we can correct base to common base in corrections. 
+ * + * TODO: + * todo Q: WHAT QUALITY TO USE?? + * todo how do we deal with mate pairs? + * + * + + + */ +public class ReadErrorCorrector { + private final static Logger logger = Logger.getLogger(ReadErrorCorrector.class); + /** + * A map of for each kmer to its num occurrences in addKmers + */ + KMerCounter countsByKMer; + + Map kmerCorrectionMap = new HashMap<>(); + Map> kmerDifferingBases = new HashMap<>(); + private final int kmerLength; + private final boolean debug; + private final boolean trimLowQualityBases; + private final byte minTailQuality; + private final int maxMismatchesToCorrect; + private final byte qualityOfCorrectedBases; + private final int maxObservationsForKmerToBeCorrectable; + private final int maxHomopolymerLengthInRegion; + private final int minObservationsForKmerToBeSolid; + + // default values, for debugging + private final static boolean doInplaceErrorCorrection = false; // currently not used, since we want corrected reads to be used only for assembly + private final static int MAX_MISMATCHES_TO_CORRECT = 2; + private final static byte QUALITY_OF_CORRECTED_BASES = 30; // what's a reasonable value here? 
+ private final static int MAX_OBSERVATIONS_FOR_KMER_TO_BE_CORRECTABLE = 1; + private final static boolean TRIM_LOW_QUAL_TAILS = false; + private final static boolean DONT_CORRECT_IN_LONG_HOMOPOLYMERS = false; + private final static int MAX_HOMOPOLYMER_THRESHOLD = 12; + + // debug counter structure + private final ReadErrorCorrectionStats readErrorCorrectionStats = new ReadErrorCorrectionStats(); + + /** + * Create a new kmer corrector + * + * @param kmerLength the length of kmers we'll be counting to error correct, must be >= 1 + * @param maxMismatchesToCorrect e >= 0 + * @param qualityOfCorrectedBases Bases to be corrected will be assigned this quality + */ + public ReadErrorCorrector(final int kmerLength, + final int maxMismatchesToCorrect, + final int maxObservationsForKmerToBeCorrectable, + final byte qualityOfCorrectedBases, + final int minObservationsForKmerToBeSolid, + final boolean trimLowQualityBases, + final byte minTailQuality, + final boolean debug, + final byte[] fullReferenceWithPadding) { + if ( kmerLength < 1 ) throw new IllegalArgumentException("kmerLength must be > 0 but got " + kmerLength); + if ( maxMismatchesToCorrect < 1 ) + throw new IllegalArgumentException("maxMismatchesToCorrect must be >= 1 but got " + maxMismatchesToCorrect); + if ( qualityOfCorrectedBases < 2 || qualityOfCorrectedBases > QualityUtils.MAX_REASONABLE_Q_SCORE) + throw new IllegalArgumentException("qualityOfCorrectedBases must be >= 2 and <= MAX_REASONABLE_Q_SCORE but got " + qualityOfCorrectedBases); + + countsByKMer = new KMerCounter(kmerLength); + this.kmerLength = kmerLength; + this.maxMismatchesToCorrect = maxMismatchesToCorrect; + this.qualityOfCorrectedBases = qualityOfCorrectedBases; + this.minObservationsForKmerToBeSolid = minObservationsForKmerToBeSolid; + this.trimLowQualityBases = trimLowQualityBases; + this.minTailQuality = minTailQuality; + this.debug = debug; + this.maxObservationsForKmerToBeCorrectable = maxObservationsForKmerToBeCorrectable; + + // when 
region has long homopolymers, we may want not to correct reads, since assessment is complicated, + // so we may decide to skip error correction in these regions + maxHomopolymerLengthInRegion = computeMaxHLen(fullReferenceWithPadding); + } + + /** + * Simple constructor with sensible defaults + * @param kmerLength K-mer length for error correction (not necessarily the same as for assembly graph) + * @param minTailQuality Minimum tail quality: remaining bases with Q's below this value are hard-clipped after correction + * @param debug Output debug information + */ + public ReadErrorCorrector(final int kmerLength, final byte minTailQuality, final int minObservationsForKmerToBeSolid, final boolean debug,final byte[] fullReferenceWithPadding) { + this(kmerLength, MAX_MISMATCHES_TO_CORRECT, MAX_OBSERVATIONS_FOR_KMER_TO_BE_CORRECTABLE, QUALITY_OF_CORRECTED_BASES, minObservationsForKmerToBeSolid, TRIM_LOW_QUAL_TAILS, minTailQuality, debug,fullReferenceWithPadding); + } + + /** + * Main entry routine to add all kmers in a read to the read map counter + * @param read Read to add bases + */ + @Requires("read != null") + protected void addReadKmers(final GATKSAMRecord read) { + if (DONT_CORRECT_IN_LONG_HOMOPOLYMERS && maxHomopolymerLengthInRegion > MAX_HOMOPOLYMER_THRESHOLD) + return; + + final byte[] readBases = read.getReadBases(); + for (int offset = 0; offset <= readBases.length-kmerLength; offset++ ) { + countsByKMer.addKmer(new Kmer(readBases,offset,kmerLength),1); + + } + } + + /** + * Correct a collection of reads based on stored k-mer counts + * @param reads + */ + public final List correctReads(final Collection reads) { + + final List correctedReads = new ArrayList<>(reads.size()); + if (DONT_CORRECT_IN_LONG_HOMOPOLYMERS && maxHomopolymerLengthInRegion > MAX_HOMOPOLYMER_THRESHOLD) { + // just copy reads into output and exit + correctedReads.addAll(reads); + } + else { + computeKmerCorrectionMap(); + for (final GATKSAMRecord read: reads) { + final GATKSAMRecord 
correctedRead = correctRead(read); + if (trimLowQualityBases) + correctedReads.add(ReadClipper.hardClipLowQualEnds(correctedRead, minTailQuality)); + else + correctedReads.add(correctedRead); + } + if (debug) { + logger.info("Number of corrected bases:"+readErrorCorrectionStats.numBasesCorrected); + logger.info("Number of corrected reads:"+readErrorCorrectionStats.numReadsCorrected); + logger.info("Number of skipped reads:"+readErrorCorrectionStats.numReadsUncorrected); + logger.info("Number of solid kmers:"+readErrorCorrectionStats.numSolidKmers); + logger.info("Number of corrected kmers:"+readErrorCorrectionStats.numCorrectedKmers); + logger.info("Number of uncorrectable kmers:"+readErrorCorrectionStats.numUncorrectableKmers); + } + } + return correctedReads; + } + + + /** + * Do actual read correction based on k-mer map. First, loop through stored k-mers to get a list of possible corrections + * for each position in the read. Then correct read based on all possible consistent corrections. + * @param inputRead Read to correct + * @return Corrected read (can be same reference as input if doInplaceErrorCorrection is set) + */ + @Requires("inputRead != null") + private GATKSAMRecord correctRead(final GATKSAMRecord inputRead) { + // no support for reduced reads (which shouldn't need to be error-corrected anyway!) 
+ if (inputRead.isReducedRead()) + return inputRead; + + // do actual correction + boolean corrected = false; + final byte[] correctedBases = inputRead.getReadBases(); + final byte[] correctedQuals = inputRead.getBaseQualities(); + + // array to store list of possible corrections for read + final CorrectionSet correctionSet = buildCorrectionMap(correctedBases); + + for (int offset = 0; offset < correctedBases.length; offset++) { + final Byte b = correctionSet.getConsensusCorrection(offset); + if (b != null && b != correctedBases[offset]) { + correctedBases[offset] = b; + correctedQuals[offset] = qualityOfCorrectedBases; + corrected = true; + } + readErrorCorrectionStats.numBasesCorrected++; + } + + if (corrected) { + readErrorCorrectionStats.numReadsCorrected++; + if (doInplaceErrorCorrection) { + inputRead.setReadBases(correctedBases); + inputRead.setBaseQualities(correctedQuals); + return inputRead; + } + else { + GATKSAMRecord correctedRead = new GATKSAMRecord(inputRead); + + // do the actual correction + // todo - do we need to clone anything else from read? + correctedRead.setBaseQualities(inputRead.getBaseQualities()); + correctedRead.setIsStrandless(inputRead.isStrandless()); + correctedRead.setReadBases(inputRead.getReadBases()); + correctedRead.setReadString(inputRead.getReadString()); + correctedRead.setReadGroup(inputRead.getReadGroup()); + return correctedRead; + } + } + else { + readErrorCorrectionStats.numReadsUncorrected++; + return inputRead; + } + } + + /** + * Build correction map for each of the bases in read. + * For each of the constituent kmers in read: + * a) See whether the kmer has been mapped to a corrected kmer. + * b) If so, get list of differing positions and corresponding bases. + * c) Add then list of new bases to index in correction list. + * Correction list is of read size, and holds a list of bases to correct. + * @param correctedBases Bases to attempt to correct + * @return CorrectionSet object. 
+ */ + @Requires("correctedBases != null") + private CorrectionSet buildCorrectionMap(final byte[] correctedBases) { + // array to store list of possible corrections for read + final CorrectionSet correctionSet = new CorrectionSet(correctedBases.length); + + for (int offset = 0; offset <= correctedBases.length-kmerLength; offset++ ) { + final Kmer kmer = new Kmer(correctedBases,offset,kmerLength); + final Kmer newKmer = kmerCorrectionMap.get(kmer); + if (newKmer != null && !newKmer.equals(kmer)){ + final Pair differingPositions = kmerDifferingBases.get(kmer); + final int[] differingIndeces = differingPositions.first; + final byte[] differingBases = differingPositions.second; + + for (int k=0; k < differingIndeces.length; k++) { + // get list of differing positions for corrected kmer + // for each of these, add correction candidate to correction set + correctionSet.add(offset + differingIndeces[k],differingBases[k]); + } + } + } + return correctionSet; + } + + + /** + * Top-level entry point that adds a collection of reads to our kmer list. + * For each read in list, its constituent kmers will be logged in our kmer table. + * @param reads + */ + @Requires("reads != null") + public void addReadsToKmers(final Collection reads) { + for (final GATKSAMRecord read: reads) + addReadKmers(read); + + if (debug) + for ( final KMerCounter.CountedKmer countedKmer: countsByKMer.getCountedKmers() ) + logger.info(String.format("%s\t%d\n", countedKmer.kmer, countedKmer.count)); + } + + + /** + * For each kmer we've seen, do the following: + * a) If kmer count > threshold1, this kmer is good, so correction map will be to itself. + * b) If kmer count <= threshold2, this kmer is bad. + * In that case, loop through all other kmers. If kmer is good, compute distance, and get minimal distance. + * If such distance is < some threshold, map to this kmer, and record differing positions and bases. 
+ * + */ + private void computeKmerCorrectionMap() { + for (final KMerCounter.CountedKmer storedKmer : countsByKMer.getCountedKmers()) { + if (storedKmer.getCount() >= minObservationsForKmerToBeSolid) { + // this kmer is good: map to itself + kmerCorrectionMap.put(storedKmer.getKmer(),storedKmer.getKmer()); + kmerDifferingBases.put(storedKmer.getKmer(),new Pair<>(new int[0],new byte[0])); // dummy empty array + readErrorCorrectionStats.numSolidKmers++; + } + else if (storedKmer.getCount() <= maxObservationsForKmerToBeCorrectable) { + // loop now thru all other kmers to find nearest neighbor + final Pair> nearestNeighbor = findNearestNeighbor(storedKmer.getKmer(),countsByKMer,maxMismatchesToCorrect); + + // check if nearest neighbor lies in a close vicinity. If so, log the new bases and the correction map + if (nearestNeighbor != null) { // ok, found close neighbor + kmerCorrectionMap.put(storedKmer.getKmer(), nearestNeighbor.first); + kmerDifferingBases.put(storedKmer.getKmer(), nearestNeighbor.second); + readErrorCorrectionStats.numCorrectedKmers++; +// if (debug) +// logger.info("Original kmer:"+storedKmer + "\tCorrected kmer:"+nearestNeighbor.first+"\tDistance:"+dist); + } + else + readErrorCorrectionStats.numUncorrectableKmers++; + + } + } + } + + /** + * Finds nearest neighbor of a given k-mer, among a list of counted K-mers, up to a given distance. + * If many k-mers share same closest distance, an arbitrary k-mer is picked + * @param kmer K-mer of interest + * @param countsByKMer KMerCounter storing set of counted k-mers (may include kmer of interest) + * @param maxDistance Maximum distance to search + * @return Pair of values: closest K-mer in Hamming distance and list of differing bases. 
+ * If no neighbor can be found up to given distance, returns null + */ + @Requires({"kmer != null", "countsByKMer != null","maxDistance >= 1"}) + private Pair> findNearestNeighbor(final Kmer kmer, + final KMerCounter countsByKMer, + final int maxDistance) { + int minimumDistance = Integer.MAX_VALUE; + Kmer closestKmer = null; + + final int[] differingIndeces = new int[maxDistance+1]; + final byte[] differingBases = new byte[maxDistance+1]; + + final int[] closestDifferingIndices = new int[maxDistance+1]; + final byte[] closestDifferingBases = new byte[maxDistance+1]; + + for (final KMerCounter.CountedKmer candidateKmer : countsByKMer.getCountedKmers()) { + // skip if candidate set includes test kmer + if (candidateKmer.getKmer().equals(kmer)) + continue; + + final int hammingDistance = kmer.getDifferingPositions(candidateKmer.getKmer(), maxDistance, differingIndeces, differingBases); + if (hammingDistance < 0) // can't compare kmer? skip + continue; + + if (hammingDistance < minimumDistance) { + minimumDistance = hammingDistance; + closestKmer = candidateKmer.getKmer(); + System.arraycopy(differingBases,0,closestDifferingBases,0,differingBases.length); + System.arraycopy(differingIndeces,0,closestDifferingIndices,0,differingIndeces.length); + } + } + return new Pair<>(closestKmer, new Pair<>(closestDifferingIndices,closestDifferingBases)); + } + + + /** + * experimental function to compute max homopolymer length in a given reference context + * @param fullReferenceWithPadding Reference context of interest + * @return Max homopolymer length in region + */ + @Requires("fullReferenceWithPadding != null") + private static int computeMaxHLen(final byte[] fullReferenceWithPadding) { + + int leftRun = 1; + int maxRun = 1; + for ( int i = 1; i < fullReferenceWithPadding.length; i++) { + if ( fullReferenceWithPadding[i] == fullReferenceWithPadding[i-1] ) + leftRun++; + else + leftRun = 1; + } + if (leftRun > maxRun) + maxRun = leftRun; + + + return maxRun; + } + + private 
static final class ReadErrorCorrectionStats { + public int numReadsCorrected; + public int numReadsUncorrected; + public int numBasesCorrected; + public int numSolidKmers; + public int numUncorrectableKmers; + public int numCorrectedKmers; + } + + /** + * Wrapper utility class that holds, for each position in read, a list of bytes representing candidate corrections. + * So, a read ACAGT where the middle A has found to be errorful might look like: + * 0: {} + * 1: {} + * 2: {'C','C','C'} + * 3: {} + * 4: {} + * + * It's up to the method getConsensusCorrection() to decide how to use the correction sets for each position. + * By default, only strict consensus is allowed right now. + * + */ + protected static class CorrectionSet { + private final int size; + private ArrayList> corrections; + + /** + * Main class constructor. + * @param size Size of correction set, needs to be set equal to the read being corrected + */ + public CorrectionSet(final int size) { + this.size = size; + corrections = new ArrayList<>(size); + for (int k=0; k < size; k++) + corrections.add(k,new ArrayList()); + } + + /** + * Add a base to this correction set at a particular offset, measured from the start of the read + * @param offset Offset from start of read + * @param base base to be added to list of corrections at this offset + */ + public void add(final int offset, final byte base) { + if (offset >= size || offset < 0) + throw new IllegalStateException("Bad entry into CorrectionSet: offset > size"); + if (!BaseUtils.isRegularBase(base)) + return; // no irregular base correction + + final List storedBytes = corrections.get(offset); + storedBytes.add(base); + } + + /** + * Get list of corrections for a particular offset + * @param offset Offset of interest + * @return List of bases representing possible corrections at this offset + */ + public List get(final int offset) { + if (offset >= size || offset < 0) + throw new IllegalArgumentException("Illegal call of CorrectionSet.get(): offset 
must be < size"); + return corrections.get(offset); + } + + /** + * Get consensus correction for a particular offset. In this implementation, it just boils down to seeing if + * byte list associated with offset has identical values. If so, return this base, otherwise return null. + * @param offset + * @return Consensus base, or null if no consensus possible. + */ + public Byte getConsensusCorrection(final int offset) { + if (offset >= size || offset < 0) + throw new IllegalArgumentException("Illegal call of CorrectionSet.getConsensusCorrection(): offset must be < size"); + final List storedBytes = corrections.get(offset); + if (storedBytes.isEmpty()) + return null; + + // todo - is there a cheaper/nicer way to compare if all elements in list are identical?? + final byte lastBase = storedBytes.remove(storedBytes.size()-1); + for (final Byte b: storedBytes) { + // strict correction rule: all bases must match + if (b != lastBase) + return null; + } + + // all bytes then are equal: + return lastBase; + + } + + + + } +} diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java index 8938af7c2..2b37d90c2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java @@ -309,7 +309,7 @@ public class BaseGraph extends Default } v = getNextReferenceVertex(v); // advance along the reference path while( v != null && !v.equals(toVertex) ) { - bytes = ArrayUtils.addAll( bytes, getAdditionalSequence(v) ); + bytes = ArrayUtils.addAll(bytes, getAdditionalSequence(v)); v = getNextReferenceVertex(v); // advance along the reference path } if( includeStop && v != null && v.equals(toVertex)) { @@ -388,6 +388,17 @@ public class BaseGraph extends Default return s; } + /** + * Get the set of vertices 
connected to v by incoming or outgoing edges + * @param v a non-null vertex + * @return a set of vertices {X} connected X -> v or v -> Y + */ + public Set neighboringVerticesOf(final V v) { + final Set s = incomingVerticesOf(v); + s.addAll(outgoingVerticesOf(v)); + return s; + } + /** * Print out the graph in the dot language for visualization * @param destination File to write to @@ -550,7 +561,7 @@ public class BaseGraph extends Default verticesToRemove.removeAll(onPathFromRefSource); removeAllVertices(verticesToRemove); - // simple santity checks that this algorithm is working. + // simple sanity checks that this algorithm is working. if ( getSinks().size() > 1 ) { throw new IllegalStateException("Should have eliminated all but the reference sink, but found " + getSinks()); } @@ -664,4 +675,72 @@ public class BaseGraph extends Default "kmerSize=" + kmerSize + '}'; } + + /** + * The base sequence for the given path. + * Note, this assumes that the path does not start with a source node. 
+ * + * @param path the list of vertexes that make up the path + * @return non-null sequence of bases corresponding to the given path + */ + @Ensures({"result != null"}) + public byte[] getBasesForPath(final List path) { + if ( path == null ) throw new IllegalArgumentException("Path cannot be null"); + + final StringBuffer sb = new StringBuffer(); + for ( final DeBruijnVertex v : path ) + sb.append((char)v.getSuffix()); + + return sb.toString().getBytes(); + } + + /** + * Get the set of vertices within distance edges of source, regardless of edge direction + * + * @param source the source vertex to consider + * @param distance the distance + * @return a set of vertices within distance of source + */ + protected Set verticesWithinDistance(final V source, final int distance) { + if ( distance == 0 ) + return Collections.singleton(source); + + final Set found = new HashSet<>(); + found.add(source); + for ( final V v : neighboringVerticesOf(source) ) { + found.addAll(verticesWithinDistance(v, distance - 1)); + } + + return found; + } + + /** + * Get a graph containing only the vertices within distance edges of target + * @param target a vertex in graph + * @param distance the max distance + * @return a non-null graph + */ + public BaseGraph subsetToNeighbors(final V target, final int distance) { + if ( target == null ) throw new IllegalArgumentException("Target cannot be null"); + if ( ! 
containsVertex(target) ) throw new IllegalArgumentException("Graph doesn't contain vertex " + target); + if ( distance < 0 ) throw new IllegalArgumentException("Distance must be >= 0 but got " + distance); + + + final Set toKeep = verticesWithinDistance(target, distance); + final Set toRemove = new HashSet<>(vertexSet()); + toRemove.removeAll(toKeep); + + final BaseGraph result = (BaseGraph)clone(); + result.removeAllVertices(toRemove); + + return result; + } + + /** + * Get a subgraph of graph that contains only vertices within 10 edges of the ref source vertex + * @return a non-null subgraph of this graph + */ + public BaseGraph subsetToRefSource() { + return subsetToNeighbors(getReferenceSourceVertex(), 10); + } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java index 4aa6047a9..73a1daa3e 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java @@ -171,7 +171,15 @@ final public class GraphUtils { return foundDup ? 
null : new PrimitivePair.Int(longestPos, length); } - private static int longestSuffixMatch(final byte[] seq, final byte[] kmer, final int seqStart) { + /** + * calculates the longest suffix match between a sequence and a smaller kmer + * + * @param seq the (reference) sequence + * @param kmer the smaller kmer sequence + * @param seqStart the index (inclusive) on seq to start looking backwards from + * @return the longest matching suffix + */ + public static int longestSuffixMatch(final byte[] seq, final byte[] kmer, final int seqStart) { for ( int len = 1; len <= kmer.length; len++ ) { final int seqI = seqStart - len + 1; final int kmerI = kmer.length - len; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java index 7327b5736..27b6bd902 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/LowWeightChainPruner.java @@ -80,7 +80,7 @@ public class LowWeightChainPruner { final Set edgesToKeep = new LinkedHashSet<>(); for ( final Path linearChain : getLinearChains(graph) ) { - if( mustBeKeep(linearChain, pruneFactor) ) { + if( mustBeKept(linearChain, pruneFactor) ) { // we must keep edges in any path that contains a reference edge or an edge with weight > pruneFactor edgesToKeep.addAll(linearChain.getEdges()); } @@ -96,10 +96,14 @@ public class LowWeightChainPruner { } /** - * Get the maximum pruning multiplicity seen on any edge in this graph - * @return an integer > 0 + * Traverse the edges in the path and determine if any are either ref edges or have weight above + * the pruning factor and should therefore not be pruned away. 
+ * + * @param path the path in question + * @param pruneFactor the integer pruning factor + * @return true if any edge in the path must be kept */ - private boolean mustBeKeep(final Path path, final int pruneFactor) { + private boolean mustBeKept(final Path path, final int pruneFactor) { for ( final E edge : path.getEdges() ) { if ( edge.getPruningMultiplicity() >= pruneFactor || edge.isRef() ) return true; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java index c1937e5c8..978d83eb4 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdge.java @@ -46,6 +46,8 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; +import java.util.PriorityQueue; + /** * edge class for connecting nodes in the graph that tracks some per-sample information * @@ -63,32 +65,43 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; * e.getPruningMultiplicity() // = 3 */ public class MultiSampleEdge extends BaseEdge { - private int maxSingleSampleMultiplicity, currentSingleSampleMultiplicity; + private int currentSingleSampleMultiplicity; + private final int singleSampleCapacity; + private final PriorityQueue singleSampleMultiplicities; /** * Create a new MultiSampleEdge with weight multiplicity and, if isRef == true, indicates a path through the reference * * @param isRef indicates whether this edge is a path through the reference * @param multiplicity the number of observations of this edge in this sample + * @param singleSampleCapacity the max number of samples to track edge multiplicities */ - public MultiSampleEdge(final boolean isRef, final int multiplicity) { + public MultiSampleEdge(final boolean isRef, final int 
multiplicity, final int singleSampleCapacity) { super(isRef, multiplicity); - maxSingleSampleMultiplicity = multiplicity; + + if( singleSampleCapacity <= 0 ) { throw new IllegalArgumentException("singleSampleCapacity must be > 0 but found: " + singleSampleCapacity); } + singleSampleMultiplicities = new PriorityQueue<>(singleSampleCapacity); + singleSampleMultiplicities.add(multiplicity); currentSingleSampleMultiplicity = multiplicity; + this.singleSampleCapacity = singleSampleCapacity; } @Override public MultiSampleEdge copy() { - return new MultiSampleEdge(isRef(), getMultiplicity()); // TODO -- should I copy values for other features? + return new MultiSampleEdge(isRef(), getMultiplicity(), singleSampleCapacity); // TODO -- should I copy values for other features? } /** - * update the max single sample multiplicity based on the current single sample multiplicity, and + * update the single sample multiplicities by adding the current single sample multiplicity to the priority queue, and * reset the current single sample multiplicity to 0. 
*/ public void flushSingleSampleMultiplicity() { - if ( currentSingleSampleMultiplicity > maxSingleSampleMultiplicity ) - maxSingleSampleMultiplicity = currentSingleSampleMultiplicity; + singleSampleMultiplicities.add(currentSingleSampleMultiplicity); + if( singleSampleMultiplicities.size() == singleSampleCapacity + 1 ) { + singleSampleMultiplicities.poll(); // remove the lowest multiplicity from the list + } else if( singleSampleMultiplicities.size() > singleSampleCapacity + 1 ) { + throw new IllegalStateException("Somehow the per sample multiplicity list has grown too big: " + singleSampleMultiplicities); + } currentSingleSampleMultiplicity = 0; } @@ -100,20 +113,12 @@ public class MultiSampleEdge extends BaseEdge { @Override public int getPruningMultiplicity() { - return getMaxSingleSampleMultiplicity(); + return singleSampleMultiplicities.peek(); } @Override public String getDotLabel() { - return super.getDotLabel() + "/" + getMaxSingleSampleMultiplicity(); - } - - /** - * Get the maximum multiplicity for this edge seen in any single sample - * @return an integer >= 0 - */ - public int getMaxSingleSampleMultiplicity() { - return maxSingleSampleMultiplicity; + return super.getDotLabel() + "/" + getPruningMultiplicity(); } /** only provided for testing purposes */ diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java index a07b98bb6..2e84e1d22 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; -import com.google.java.contract.Requires; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; @@ -92,7 +91,7 
@@ public class Path { /** * Create a new Path containing no edges and starting at initialVertex * @param initialVertex the starting vertex of the path - * @param graph the graph this path with follow through + * @param graph the graph this path will follow through */ public Path(final T initialVertex, final BaseGraph graph) { if ( initialVertex == null ) throw new IllegalArgumentException("initialVertex cannot be null"); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index 20edcb39b..36c515073 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -155,20 +155,29 @@ public final class SeqGraph extends BaseGraph { //logger.info("simplifyGraph iteration " + i); // iterate until we haven't don't anything useful boolean didSomeWork = false; - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".1.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".1.dot")); didSomeWork |= new MergeDiamonds().transformUntilComplete(); didSomeWork |= new MergeTails().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".2.diamonds_and_tails.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".2.diamonds_and_tails.dot")); didSomeWork |= new SplitCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + iteration + ".3.split_suffix.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".3.split_suffix.dot")); didSomeWork |= new MergeCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." 
+ iteration + ".4.merge_suffix.dot"), 0); + printGraphSimplification(new File("simplifyGraph." + iteration + ".4.merge_suffix.dot")); didSomeWork |= zipLinearChains(); return didSomeWork; } + /** + * Print simplification step of this graph, if PRINT_SIMPLIFY_GRAPHS is enabled + * @param file the destination for the graph DOT file + */ + private void printGraphSimplification(final File file) { + if ( PRINT_SIMPLIFY_GRAPHS ) + subsetToNeighbors(getReferenceSourceVertex(), 5).printGraph(file, 0); + } + /** + * Zip up all of the simple linear chains present in this graph. + * @@ -352,7 +361,7 @@ public final class SeqGraph extends BaseGraph { * Merge until the graph has no vertices that are candidates for merging */ public boolean transformUntilComplete() { - boolean didAtLeastOneTranform = false; + boolean didAtLeastOneTransform = false; boolean foundNodesToMerge = true; while( foundNodesToMerge ) { foundNodesToMerge = false; @@ -360,13 +369,13 @@ public final class SeqGraph extends BaseGraph { for( final SeqVertex v : vertexSet() ) { foundNodesToMerge = tryToTransform(v); if ( foundNodesToMerge ) { - didAtLeastOneTranform = true; + didAtLeastOneTransform = true; break; } } } - return didAtLeastOneTranform; + return didAtLeastOneTransform; } /** diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java index 0babd8d56..5d725b1dd 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java @@ -81,7 +81,7 @@ public class SharedSequenceMerger { else { // graph.printGraph(new File("csm." + counter + "." 
+ v.getSequenceString() + "_pre.dot"), 0); - final List edgesToRemove = new LinkedList(); + final List edgesToRemove = new LinkedList<>(); + final byte[] prevSeq = prevs.iterator().next().getSequence(); + final SeqVertex newV = new SeqVertex(ArrayUtils.addAll(prevSeq, v.getSequence())); + graph.addVertex(newV); @@ -124,11 +124,17 @@ public class SharedSequenceMerger { final SeqVertex first = incomingVertices.iterator().next(); for ( final SeqVertex prev : incomingVertices) { if ( ! prev.seqEquals(first) ) + // cannot merge if our sequence isn't the same as the first sequence return false; final Collection prevOuts = graph.outgoingVerticesOf(prev); if ( prevOuts.size() != 1 ) + // prev -> v must be the only edge from prev return false; if ( prevOuts.iterator().next() != v ) + // don't allow cycles + return false; + if ( graph.inDegreeOf(prev) == 0 ) + // cannot merge when any of the incoming nodes are sources return false; } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java index db0ce0880..672c61c0f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssembler.java @@ -49,12 +49,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.LocalAssemblyEngine; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.io.File; import java.util.Arrays; -import java.util.Collections; import 
java.util.LinkedList; import java.util.List; @@ -62,11 +62,16 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { private final static Logger logger = Logger.getLogger(ReadThreadingAssembler.class); private final static int DEFAULT_NUM_PATHS_PER_GRAPH = 128; + private final static int GGA_MODE_ARTIFICIAL_COUNTS = 1000; + private final static int KMER_SIZE_ITERATION_INCREASE = 10; + private final static int MAX_KMER_ITERATIONS_TO_ATTEMPT = 6; /** The min and max kmer sizes to try when building the graph. */ private final List kmerSizes; private final int maxAllowedPathsForReadThreadingAssembler; + private final boolean dontIncreaseKmerSizesForCycles; + private final int numPruningSamples; private boolean requireReasonableNumberOfPaths = false; protected boolean removePathsNotConnectedToRef = true; private boolean justReturnRawGraph = false; @@ -76,10 +81,16 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { this(DEFAULT_NUM_PATHS_PER_GRAPH, Arrays.asList(25)); } - public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes, final boolean dontIncreaseKmerSizesForCycles, final int numPruningSamples) { super(maxAllowedPathsForReadThreadingAssembler); this.kmerSizes = kmerSizes; this.maxAllowedPathsForReadThreadingAssembler = maxAllowedPathsForReadThreadingAssembler; + this.dontIncreaseKmerSizesForCycles = dontIncreaseKmerSizesForCycles; + this.numPruningSamples = numPruningSamples; + } + + public ReadThreadingAssembler(final int maxAllowedPathsForReadThreadingAssembler, final List kmerSizes) { + this(maxAllowedPathsForReadThreadingAssembler, kmerSizes, true, 1); } /** for testing purposes */ @@ -88,66 +99,117 @@ public class ReadThreadingAssembler extends LocalAssemblyEngine { } @Override - public List assemble( final List reads, final Haplotype refHaplotype) { + public List 
assemble(final List reads, final Haplotype refHaplotype, final List activeAlleleHaplotypes) { final List graphs = new LinkedList<>(); + // first, try using the requested kmer sizes for ( final int kmerSize : kmerSizes ) { - final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly); + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes, dontIncreaseKmerSizesForCycles); + if ( graph != null ) + graphs.add(graph); + } - // add the reference sequence to the graph - rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); - - // Next pull kmers out of every read and throw them on the graph - for( final GATKSAMRecord read : reads ) { - rtgraph.addRead(read); - } - - // actually build the read threading graph - rtgraph.buildGraphIfNecessary(); - if ( debugGraphTransformations ) rtgraph.printGraph(new File("sequenceGraph.0.0.raw_readthreading_graph.dot"), pruneFactor); - - // go through and prune all of the chains where all edges have <= pruneFactor. 
This must occur - // before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering - // tails that we'll ultimately just trim away anyway, as the dangling tail edges have weight of 1 - rtgraph.pruneLowWeightChains(pruneFactor); - - // look at all chains in the graph that terminate in a non-ref node (dangling sinks) and see if - // we can recover them by merging some N bases from the chain back into the reference uniquely, for - // N < kmerSize - if ( recoverDanglingTails ) rtgraph.recoverDanglingTails(); - - // remove all heading and trailing paths - if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef(); - - if ( debugGraphTransformations ) rtgraph.printGraph(new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot"), pruneFactor); - - final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph(); - - // if the unit tests don't want us to cleanup the graph, just return the raw sequence graph - if ( justReturnRawGraph ) return Collections.singletonList(initialSeqGraph); - - if ( debug ) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler"); - if ( debugGraphTransformations ) initialSeqGraph.printGraph(new File("sequenceGraph.0.2.initial_seqgraph.dot"), pruneFactor); - initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't this is possible by construction - - final SeqGraph seqGraph = cleanupSeqGraph(initialSeqGraph); - if ( seqGraph != null ) { - if ( ! 
requireReasonableNumberOfPaths || reasonableNumberOfPaths(seqGraph) ) { - graphs.add(seqGraph); - } + // if none of those worked, iterate over larger sizes if allowed to do so + if ( graphs.isEmpty() && !dontIncreaseKmerSizesForCycles ) { + int kmerSize = MathUtils.arrayMaxInt(kmerSizes) + KMER_SIZE_ITERATION_INCREASE; + int numIterations = 1; + while ( graphs.isEmpty() && numIterations <= MAX_KMER_ITERATIONS_TO_ATTEMPT ) { + // on the last attempt we will allow low complexity graphs + final SeqGraph graph = createGraph(reads, refHaplotype, kmerSize, activeAlleleHaplotypes, numIterations == MAX_KMER_ITERATIONS_TO_ATTEMPT); + if ( graph != null ) + graphs.add(graph); + kmerSize += KMER_SIZE_ITERATION_INCREASE; + numIterations++; } } return graphs; } + /** + * Creates the sequence graph for the given kmerSize + * + * @param reads reads to use + * @param refHaplotype reference haplotype + * @param kmerSize kmer size + * @param activeAlleleHaplotypes the GGA haplotypes to inject into the graph + * @param allowLowComplexityGraphs if true, do not check for low-complexity graphs + * @return sequence graph or null if one could not be created (e.g. 
because it contains cycles or too many paths or is low complexity) + */ + protected SeqGraph createGraph(final List reads, + final Haplotype refHaplotype, + final int kmerSize, + final List activeAlleleHaplotypes, + final boolean allowLowComplexityGraphs) { + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize, debugGraphTransformations, minBaseQualityToUseInAssembly, numPruningSamples); + + // add the reference sequence to the graph + rtgraph.addSequence("ref", refHaplotype.getBases(), null, true); + + // add the artificial GGA haplotypes to the graph + int hapCount = 0; + for ( final Haplotype h : activeAlleleHaplotypes ) { + final int[] counts = new int[h.length()]; + Arrays.fill(counts, GGA_MODE_ARTIFICIAL_COUNTS); + rtgraph.addSequence("activeAllele" + hapCount++, h.getBases(), counts, false); + } + + // Next pull kmers out of every read and throw them on the graph + for( final GATKSAMRecord read : reads ) { + rtgraph.addRead(read); + } + + // actually build the read threading graph + rtgraph.buildGraphIfNecessary(); + + // sanity check: make sure there are no cycles in the graph + if ( rtgraph.hasCycles() ) { + if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it contains a cycle"); + return null; + } + + // sanity check: make sure the graph had enough complexity with the given kmer + if ( ! allowLowComplexityGraphs && rtgraph.isLowComplexity() ) { + if ( debug ) logger.info("Not using kmer size of " + kmerSize + " in read threading assembler because it does not produce a graph with enough complexity"); + return null; + } + + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.0.raw_readthreading_graph.dot")); + + // go through and prune all of the chains where all edges have <= pruneFactor. 
This must occur + // before recoverDanglingTails in the graph, so that we don't spend a ton of time recovering + // tails that we'll ultimately just trim away anyway, as the dangling tail edges have weight of 1 + rtgraph.pruneLowWeightChains(pruneFactor); + + // look at all chains in the graph that terminate in a non-ref node (dangling sinks) and see if + // we can recover them by merging some N bases from the chain back into the reference + if ( recoverDanglingTails ) rtgraph.recoverDanglingTails(); + + // remove all heading and trailing paths + if ( removePathsNotConnectedToRef ) rtgraph.removePathsNotConnectedToRef(); + + printDebugGraphTransform(rtgraph, new File("sequenceGraph.0.1.cleaned_readthreading_graph.dot")); + + final SeqGraph initialSeqGraph = rtgraph.convertToSequenceGraph(); + + // if the unit tests don't want us to cleanup the graph, just return the raw sequence graph + if ( justReturnRawGraph ) return initialSeqGraph; + + if ( debug ) logger.info("Using kmer size of " + rtgraph.getKmerSize() + " in read threading assembler"); + printDebugGraphTransform(initialSeqGraph, new File("sequenceGraph.0.2.initial_seqgraph.dot")); + initialSeqGraph.cleanNonRefPaths(); // TODO -- I don't think this is possible by construction + + final SeqGraph seqGraph = cleanupSeqGraph(initialSeqGraph); + return ( seqGraph != null && requireReasonableNumberOfPaths && !reasonableNumberOfPaths(seqGraph) ) ? null : seqGraph; + } + + /** + * Did we find a reasonable number of paths in this graph? 
* @param graph * @return */ private boolean reasonableNumberOfPaths(final SeqGraph graph) { - final KBestPaths pathFinder = new KBestPaths(false); + final KBestPaths pathFinder = new KBestPaths<>(false); final List> allPaths = pathFinder.getKBestPaths(graph, 100000); logger.info("Found " + allPaths.size() + " paths through " + graph + " with maximum " + maxAllowedPathsForReadThreadingAssembler); return allPaths.size() <= maxAllowedPathsForReadThreadingAssembler; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java index 6e9223afb..7d7df2c06 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraph.java @@ -46,27 +46,45 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.KMerCounter; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.collections.Pair; -import org.broadinstitute.sting.utils.collections.PrimitivePair; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.smithwaterman.SWPairwiseAlignment; +import org.broadinstitute.sting.utils.smithwaterman.SmithWaterman; import org.jgrapht.EdgeFactory; +import org.jgrapht.alg.CycleDetector; import java.io.File; import java.util.*; public class ReadThreadingGraph 
extends BaseGraph { /** - * Edge factory that creates non-reference multiplicity 1 edges + * Edge factory that encapsulates the numPruningSamples assembly parameter */ private static class MyEdgeFactory implements EdgeFactory { - @Override - public MultiSampleEdge createEdge(MultiDeBruijnVertex sourceVertex, MultiDeBruijnVertex targetVertex) { - return new MultiSampleEdge(false, 1); + final int numPruningSamples; + + public MyEdgeFactory(int numPruningSamples) { + this.numPruningSamples = numPruningSamples; } + + @Override + public MultiSampleEdge createEdge(final MultiDeBruijnVertex sourceVertex, final MultiDeBruijnVertex targetVertex) { + return new MultiSampleEdge(false, 1, numPruningSamples); + } + + public MultiSampleEdge createEdge(final boolean isRef, final int multiplicity) { + return new MultiSampleEdge(isRef, multiplicity, numPruningSamples); + } + } private final static Logger logger = Logger.getLogger(ReadThreadingGraph.class); @@ -78,13 +96,10 @@ public class ReadThreadingGraph extends BaseGraph> pending = new LinkedHashMap>(); + private final Map> pending = new LinkedHashMap<>(); /** * A set of non-unique kmers that cannot be used as merge points in the graph @@ -94,7 +109,7 @@ public class ReadThreadingGraph extends BaseGraph their corresponding vertex in the graph */ - private Map uniqueKmers = new LinkedHashMap(); + private Map uniqueKmers = new LinkedHashMap<>(); /** * @@ -111,23 +126,21 @@ public class ReadThreadingGraph extends BaseGraph= 1 */ - protected ReadThreadingGraph(final int kmerSize, final boolean debugGraphTransformations, final byte minBaseQualityToUseInAssembly) { - super(kmerSize, new MyEdgeFactory()); + protected ReadThreadingGraph(final int kmerSize, final boolean debugGraphTransformations, final byte minBaseQualityToUseInAssembly, final int numPruningSamples) { + super(kmerSize, new MyEdgeFactory(numPruningSamples)); if ( kmerSize < 1 ) throw new IllegalArgumentException("bad minkKmerSize " + kmerSize); this.kmerSize = kmerSize; 
@@ -146,8 +159,6 @@ public class ReadThreadingGraph extends BaseGraph danglingPath, referencePath; + final byte[] danglingPathString, referencePathString; + final Cigar cigar; + + public DanglingTailMergeResult(final List danglingPath, + final List referencePath, + final byte[] danglingPathString, + final byte[] referencePathString, + final Cigar cigar) { + this.danglingPath = danglingPath; + this.referencePath = referencePath; + this.danglingPathString = danglingPathString; + this.referencePathString = referencePathString; + this.cigar = cigar; + } + } + + /** + * Attempt to attach vertex with out-degree == 0 to the graph + * * @param vertex the vertex to recover + * @return 1 if we successfully recovered the vertex and 0 otherwise */ protected int recoverDanglingChain(final MultiDeBruijnVertex vertex) { if ( outDegreeOf(vertex) != 0 ) throw new IllegalStateException("Attempting to recover a dangling tail for " + vertex + " but it has out-degree > 0"); - final byte[] kmer = vertex.getSequence(); - if ( ! nonUniqueKmers.contains(new Kmer(kmer)) ) { - // don't attempt to fix non-unique kmers! - final MultiDeBruijnVertex uniqueMergePoint = danglingTailMergePoint(kmer); - if ( uniqueMergePoint != null ) { - addEdge(vertex, uniqueMergePoint, new MultiSampleEdge(false, 1)); - return 1; - } - } + // generate the CIGAR string from Smith-Waterman between the dangling tail and reference paths + final DanglingTailMergeResult danglingTailMergeResult = generateCigarAgainstReferencePath(vertex); - return 0; + // if the CIGAR is too complex (or couldn't be computed) then we do not allow the merge into the reference path + if ( danglingTailMergeResult == null || ! 
cigarIsOkayToMerge(danglingTailMergeResult.cigar) ) + return 0; + + // merge + return mergeDanglingTail(danglingTailMergeResult); } /** - * Find a unique merge point for kmer in the reference sequence - * @param kmer the full kmer of the dangling tail - * @return a vertex appropriate to merge kmer into, or null if none could be found + * Determine whether the provided cigar is okay to merge into the reference path + * + * @param cigar the cigar to analyze + * @return true if it's okay to merge, false otherwise */ - private MultiDeBruijnVertex danglingTailMergePoint(final byte[] kmer) { - final PrimitivePair.Int endAndLength = GraphUtils.findLongestUniqueSuffixMatch(refSeq, kmer); - if ( endAndLength != null && endAndLength.second >= MIN_MATCH_LENGTH_TO_RECOVER_DANGLING_TAIL && endAndLength.first + 1 < refKmers.length) { - final int len = endAndLength.second; - final MultiDeBruijnVertex mergePoint = refKmers[endAndLength.first + 1]; -// logger.info("recoverDanglingChain of kmer " + new String(kmer) + " merged to " + mergePoint + " with match size " + len); - final Set nonUniquesAtLength = determineKmerSizeAndNonUniques(len, len).nonUniques; - final Kmer matchedKmer = new Kmer(kmer, kmer.length - len, len); - if ( nonUniquesAtLength.contains(matchedKmer) ) { -// logger.info("Rejecting merge " + new String(kmer) + " because match kmer " + matchedKmer + " isn't unique across all reads"); - return null; - } else { - return mergePoint; - } + protected boolean cigarIsOkayToMerge(final Cigar cigar) { + + final List elements = cigar.getCigarElements(); + + // don't allow more than a couple of different ops + if ( elements.size() > 3 ) + return false; + + // the last element must be an M + if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.M ) + return false; + + // TODO -- do we want to check whether the Ms mismatch too much also? 
+ + return true; + } + + /** + * Actually merge the dangling tail if possible + * + * @param danglingTailMergeResult the result from generating a Cigar for the dangling tail against the reference + * @return 1 if merge was successful, 0 otherwise + */ + protected int mergeDanglingTail(final DanglingTailMergeResult danglingTailMergeResult) { + + final List elements = danglingTailMergeResult.cigar.getCigarElements(); + final CigarElement lastElement = elements.get(elements.size() - 1); + if ( lastElement.getOperator() != CigarOperator.M ) + throw new IllegalArgumentException("The last Cigar element must be an M"); + + final int lastRefIndex = danglingTailMergeResult.cigar.getReferenceLength() - 1; + final int matchingSuffix = Math.min(GraphUtils.longestSuffixMatch(danglingTailMergeResult.referencePathString, danglingTailMergeResult.danglingPathString, lastRefIndex), lastElement.getLength()); + if ( matchingSuffix == 0 ) + return 0; + + final int altIndexToMerge = Math.max(danglingTailMergeResult.cigar.getReadLength() - matchingSuffix - 1, 0); + final int refIndexToMerge = lastRefIndex - matchingSuffix + 1; + addEdge(danglingTailMergeResult.danglingPath.get(altIndexToMerge), danglingTailMergeResult.referencePath.get(refIndexToMerge), ((MyEdgeFactory)getEdgeFactory()).createEdge(false, 1)); + return 1; + } + + /** + * Generates the CIGAR string from the Smith-Waterman alignment of the dangling path (where the + * provided vertex is the sink) and the reference path. 
+ * + * @param vertex the sink of the dangling tail + * @return a SmithWaterman object which can be null if no proper alignment could be generated + */ + protected DanglingTailMergeResult generateCigarAgainstReferencePath(final MultiDeBruijnVertex vertex) { + + // find the lowest common ancestor path between vertex and the reference sink if available + final List altPath = findPathToLowestCommonAncestorOfReference(vertex); + if ( altPath == null || isRefSource(altPath.get(0)) ) + return null; + + // now get the reference path from the LCA + final List refPath = getReferencePath(altPath.get(0)); + + // create the Smith-Waterman strings to use + final byte[] refBases = getBasesForPath(refPath); + final byte[] altBases = getBasesForPath(altPath); + + // run Smith-Waterman to determine the best alignment (and remove trailing deletions since they aren't interesting) + final SmithWaterman alignment = new SWPairwiseAlignment(refBases, altBases, SWPairwiseAlignment.OVERHANG_STRATEGY.INDEL); + return new DanglingTailMergeResult(altPath, refPath, altBases, refBases, AlignmentUtils.removeTrailingDeletions(alignment.getCigar())); + } + + /** + * Finds the path upwards in the graph from this vertex to the reference sequence, including the lowest common ancestor vertex + * + * @param vertex the original vertex + * @return the path if it can be determined or null if this vertex either doesn't merge onto the reference path or + * has an ancestor with multiple incoming edges before hitting the reference path + */ + protected List findPathToLowestCommonAncestorOfReference(final MultiDeBruijnVertex vertex) { + final LinkedList path = new LinkedList<>(); + + MultiDeBruijnVertex v = vertex; + while ( ! isReferenceNode(v) && inDegreeOf(v) == 1 ) { + path.addFirst(v); + v = getEdgeSource(incomingEdgeOf(v)); + } + path.addFirst(v); + + return isReferenceNode(v) ? 
path : null; + } + + /** + * Finds the path downwards in the graph from this vertex to the reference sink, including this vertex + * + * @param start the reference vertex to start from + * @return the path (non-null, non-empty) + */ + protected List getReferencePath(final MultiDeBruijnVertex start) { + if ( ! isReferenceNode(start) ) throw new IllegalArgumentException("Cannot construct the reference path from a vertex that is not on that path"); + + final List path = new ArrayList<>(); + + MultiDeBruijnVertex v = start; + while ( v != null ) { + path.add(v); + v = getNextReferenceVertex(v); } - return null; + return path; } /** @@ -297,7 +412,7 @@ public class ReadThreadingGraph extends BaseGraph(this).detectCycles(); + } + + /** + * Does the graph not have enough complexity? We define low complexity as a situation where the number + * of non-unique kmers is more than 20% of the total number of kmers. + * + * @return true if the graph has low complexity, false otherwise + */ + public boolean isLowComplexity() { + return nonUniqueKmers.size() * 4 > uniqueKmers.size(); + } + public void recoverDanglingTails() { if ( ! alreadyBuilt ) throw new IllegalStateException("recoverDanglingTails requires the graph be already built"); @@ -332,7 +464,8 @@ public class ReadThreadingGraph extends BaseGraph determineNonUniqueKmers(final SequenceForKmers seqForKmers, final int kmerSize) { // count up occurrences of kmers within each read final KMerCounter counter = new KMerCounter(kmerSize); - for ( int i = 0; i <= seqForKmers.stop - kmerSize; i++ ) { + final int stopPosition = seqForKmers.stop - kmerSize; + for ( int i = 0; i <= stopPosition; i++ ) { final Kmer kmer = new Kmer(seqForKmers.sequence, i, kmerSize); counter.addKmer(kmer, 1); } @@ -578,23 +712,22 @@ public class ReadThreadingGraph extends BaseGraph " + uniqueMergeVertex); // either use our unique merge vertex, or create a new one in the chain final MultiDeBruijnVertex nextVertex = uniqueMergeVertex == null ? 
createVertex(kmer) : uniqueMergeVertex; - addEdge(prevVertex, nextVertex, new MultiSampleEdge(isRef, count)); + addEdge(prevVertex, nextVertex, ((MyEdgeFactory)getEdgeFactory()).createEdge(isRef, count)); return nextVertex; } /** - * Get the start and stop positions (exclusive) of the longest stretch of high quality bases - * in read + * Add the given read to the sequence graph. Ultimately the read will get sent through addSequence(), but first + * this method ensures we only use high quality bases and accounts for reduced reads, etc. * * @param read a non-null read - * @return the start and stop for high quality bases in read, or null if none exist */ protected void addRead(final GATKSAMRecord read) { final byte[] sequence = read.getReadBases(); @@ -603,7 +736,7 @@ public class ReadThreadingGraph extends BaseGraph= minBaseQualityToUseInAssembly; + } + /** * Get the set of non-unique kmers in this graph. For debugging purposes * @return a non-null set of kmers diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java index c98fe4d3c..4d50ef951 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManager.java @@ -130,7 +130,7 @@ public class ConstrainedMateFixingManager { private static final boolean DEBUG = false; /** How often do we check whether we want to emit reads? */ - private final static int EMIT_FREQUENCY = 1000; + protected final static int EMIT_FREQUENCY = 1000; /** * How much could a single read move in position from its original position? 
@@ -324,7 +324,8 @@ public class ConstrainedMateFixingManager { || noReadCanMoveBefore(read.getMateAlignmentStart(), newRead ) ) ) { // we're already past where the mate started // remove reads from the map that we have emitted -- useful for case where the mate never showed up - forMateMatching.remove(read.getReadName()); + if ( !read.getNotPrimaryAlignmentFlag() ) + forMateMatching.remove(read.getReadName()); if ( DEBUG ) logger.warn(String.format("EMIT! At %d: read %s at %d with isize %d, mate start %d, op = %s", @@ -346,7 +347,8 @@ public class ConstrainedMateFixingManager { private void writeRead(SAMRecord read) { try { - writer.addAlignment(read); + if ( writer != null ) + writer.addAlignment(read); } catch (IllegalArgumentException e) { throw new UserException("If the maximum allowable reads in memory is too small, it may cause reads to be written out of order when trying to write the BAM; please see the --maxReadsInMemory argument for details. " + e.getMessage(), e); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java index 363f7a357..c77557da6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java @@ -54,6 +54,7 @@ import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.pairhmm.Log10PairHMM; +import org.broadinstitute.sting.utils.pairhmm.LoglessPairHMM; import org.broadinstitute.sting.utils.pairhmm.PairHMM; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; @@ -78,8 +79,6 @@ public class PairHMMIndelErrorModel { 
private static final double baseMatchArray[]; private static final double baseMismatchArray[]; - private final static double LOG_ONE_HALF; - private static final int START_HRUN_GAP_IDX = 4; private static final int MAX_HRUN_GAP_IDX = 20; @@ -97,8 +96,6 @@ public class PairHMMIndelErrorModel { ///////////////////////////// static { - LOG_ONE_HALF= -Math.log10(2.0); - baseMatchArray = new double[MAX_CACHED_QUAL+1]; baseMismatchArray = new double[MAX_CACHED_QUAL+1]; for (int k=1; k <= MAX_CACHED_QUAL; k++) { @@ -120,12 +117,11 @@ public class PairHMMIndelErrorModel { case ORIGINAL: pairHMM = new Log10PairHMM(false); break; - case LOGLESS_CACHING: //TODO: still not tested so please do not use yet - //pairHMM = new LoglessCachingPairHMM(); //TODO - add it back when the figure out how to use the protected LoglessCachingPairHMM class - throw new UserException.BadArgumentValue("pairHMM"," this option (LOGLESS_CACHING in UG) is still under development"); - //break; + case LOGLESS_CACHING: + pairHMM = new LoglessPairHMM(); + break; default: - throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the UnifiedGenotyper. Acceptable options are ORIGINAL, EXACT or LOGLESS_CACHING (the third option is still under development)."); + throw new UserException.BadArgumentValue("pairHMM", "Specified pairHMM implementation is unrecognized or incompatible with the UnifiedGenotyper. 
Acceptable options are ORIGINAL, EXACT or LOGLESS_CACHING."); } // fill gap penalty table, affine naive model: @@ -466,7 +462,7 @@ public class PairHMMIndelErrorModel { final double li = readLikelihoods[readIdx][i]; final double lj = readLikelihoods[readIdx][j]; final int readCount = readCounts[readIdx]; - haplotypeLikehoodMatrix[i][j] += readCount * (MathUtils.approximateLog10SumLog10(li, lj) + LOG_ONE_HALF); + haplotypeLikehoodMatrix[i][j] += readCount * (MathUtils.approximateLog10SumLog10(li, lj) + MathUtils.LOG_ONE_HALF); } } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java index a3bdc6691..13daee8c9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/qc/AssessReducedQuals.java @@ -47,6 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.qc; import org.broadinstitute.sting.commandline.Argument; +import org.broadinstitute.sting.commandline.Hidden; import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; @@ -89,7 +90,7 @@ import java.util.List; * * @author ami */ - +@Hidden public class AssessReducedQuals extends LocusWalker implements TreeReducible { private static final String reduced = "reduced"; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java index eef9da84a..efc24d5f9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/variantrecalibration/GaussianMixtureModel.java @@ -47,7 +47,6 @@ package 
org.broadinstitute.sting.gatk.walkers.variantrecalibration; import Jama.Matrix; -import cern.jet.random.Normal; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.MathUtils; @@ -226,6 +225,20 @@ public class GaussianMixtureModel { isModelReadyForEvaluation = true; } + /** + * A version of Log10SumLog10 that tolerates NaN values in the array + * + * In the case where one or more of the values are NaN, this function returns NaN + * + * @param values a non-null vector of doubles + * @return log10 of the sum of the log10 values, or NaN + */ + private double nanTolerantLog10SumLog10(final double[] values) { + for ( final double value : values ) + if ( Double.isNaN(value) ) return Double.NaN; + return MathUtils.log10sumLog10(values); + } + public double evaluateDatum( final VariantDatum datum ) { for( final boolean isNull : datum.isNull ) { if( isNull ) { return evaluateDatumMarginalized( datum ); } @@ -236,21 +249,19 @@ public class GaussianMixtureModel { for( final MultivariateGaussian gaussian : gaussians ) { pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + gaussian.evaluateDatumLog10( datum ); } - return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) + return nanTolerantLog10SumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) } // Used only to decide which covariate dimension is most divergent in order to report in the culprit info field annotation public Double evaluateDatumInOneDimension( final VariantDatum datum, final int iii ) { if(datum.isNull[iii]) { return null; } - final Normal normal = new Normal(0.0, 1.0, null); final double[] pVarInGaussianLog10 = new double[gaussians.size()]; int gaussianIndex = 0; for( final MultivariateGaussian gaussian : gaussians ) { - normal.setState( gaussian.mu[iii], gaussian.sigma.get(iii, iii) ); - pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + Math.log10( normal.pdf( datum.annotations[iii] ) ); 
+ pVarInGaussianLog10[gaussianIndex++] = gaussian.pMixtureLog10 + MathUtils.normalDistributionLog10(gaussian.mu[iii], gaussian.sigma.get(iii, iii), datum.annotations[iii]); } - return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) + return nanTolerantLog10SumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k)) } public double evaluateDatumMarginalized( final VariantDatum datum ) { diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java index 54061c781..e7e5cf0e1 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java @@ -80,18 +80,18 @@ class AllHaplotypeBAMWriter extends HaplotypeBAMWriter { final List bestHaplotypes, final Set calledHaplotypes, final Map stratifiedReadMap) { - writeHaplotypesAsReads(haplotypes, new HashSet(bestHaplotypes), paddedReferenceLoc); + writeHaplotypesAsReads(haplotypes, new HashSet<>(bestHaplotypes), paddedReferenceLoc); // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently - final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + final Map alleleToHaplotypeMap = new HashMap<>(haplotypes.size()); for ( final Haplotype haplotype : haplotypes ) alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); // next, output the interesting reads for each sample aligned against the appropriate haplotype for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + for ( final Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { final MostLikelyAllele bestAllele = 
PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue()); - writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart()); + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative()); } } } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java index d63cf65fc..7206dd674 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java @@ -87,7 +87,7 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { writeHaplotypesAsReads(calledHaplotypes, calledHaplotypes, paddedReferenceLoc); // we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently - final Map alleleToHaplotypeMap = new HashMap(haplotypes.size()); + final Map alleleToHaplotypeMap = new HashMap<>(haplotypes.size()); for ( final Haplotype haplotype : calledHaplotypes ) { alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype); } @@ -97,10 +97,10 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { // next, output the interesting reads for each sample aligned against one of the called haplotypes for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for ( Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { + for ( final Map.Entry> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) { if ( entry.getKey().getMappingQuality() > 0 ) { final MostLikelyAllele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue(), 
allelesOfCalledHaplotypes); - writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart()); + writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele.getMostLikelyAllele()), paddedReferenceLoc.getStart(), bestAllele.isInformative()); } } } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java index 2eea664d9..1afbeed63 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java @@ -185,11 +185,13 @@ public abstract class HaplotypeBAMWriter { * @param originalRead the read we want to write aligned to the reference genome * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. 
+ * @param isInformative true if the read is differentially informative for one of the haplotypes */ protected void writeReadAgainstHaplotype(final GATKSAMRecord originalRead, final Haplotype haplotype, - final int referenceStart) { - final GATKSAMRecord alignedToRef = createReadAlignedToRef(originalRead, haplotype, referenceStart); + final int referenceStart, + final boolean isInformative) { + final GATKSAMRecord alignedToRef = createReadAlignedToRef(originalRead, haplotype, referenceStart, isInformative); if ( alignedToRef != null ) bamWriter.addAlignment(alignedToRef); } @@ -201,11 +203,13 @@ public abstract class HaplotypeBAMWriter { * @param originalRead the read we want to write aligned to the reference genome * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. + * @param isInformative true if the read is differentially informative for one of the haplotypes * @return a GATKSAMRecord aligned to reference, or null if no meaningful alignment is possible */ protected GATKSAMRecord createReadAlignedToRef(final GATKSAMRecord originalRead, final Haplotype haplotype, - final int referenceStart) { + final int referenceStart, + final boolean isInformative) { if ( originalRead == null ) throw new IllegalArgumentException("originalRead cannot be null"); if ( haplotype == null ) throw new IllegalArgumentException("haplotype cannot be null"); if ( haplotype.getCigar() == null ) throw new IllegalArgumentException("Haplotype cigar not set " + haplotype); @@ -225,6 +229,10 @@ public abstract class HaplotypeBAMWriter { addHaplotypeTag(read, haplotype); + // uninformative reads are set to zero mapping quality to enhance visualization + if ( !isInformative ) + read.setMappingQuality(0); + // compute here the read starts w.r.t. 
the reference from the SW result and the hap -> ref cigar final Cigar extendedHaplotypeCigar = haplotype.getConsolidatedPaddedCigar(1000); final int readStartOnHaplotype = AlignmentUtils.calcFirstBaseMatchingReferenceInCigar(extendedHaplotypeCigar, swPairwiseAlignment.getAlignmentStart2wrt1()); diff --git a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java index ab2a5bb2a..184a2689d 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java +++ b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/LoglessPairHMM.java @@ -55,7 +55,7 @@ import org.broadinstitute.sting.utils.QualityUtils; * User: rpoplin, carneiro * Date: 10/16/12 */ -public final class LoglessPairHMM extends PairHMM { +public final class LoglessPairHMM extends N2MemoryPairHMM { protected static final double INITIAL_CONDITION = Math.pow(2, 1020); protected static final double INITIAL_CONDITION_LOG10 = Math.log10(INITIAL_CONDITION); @@ -99,8 +99,13 @@ public final class LoglessPairHMM extends PairHMM { } } - if ( ! constantsAreInitialized || recacheReadValues ) - initializeProbabilities(insertionGOP, deletionGOP, overallGCP); + if ( ! 
constantsAreInitialized || recacheReadValues ) { + initializeProbabilities(transition, insertionGOP, deletionGOP, overallGCP); + + // note that we initialized the constants + constantsAreInitialized = true; + } + initializePriors(haplotypeBases, readBases, readQuals, hapStartIndex); for (int i = 1; i < paddedReadLength; i++) { @@ -159,7 +164,7 @@ public final class LoglessPairHMM extends PairHMM { "overallGCP != null" }) @Ensures("constantsAreInitialized") - private void initializeProbabilities(final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) { + protected static void initializeProbabilities(final double[][] transition, final byte[] insertionGOP, final byte[] deletionGOP, final byte[] overallGCP) { for (int i = 0; i < insertionGOP.length; i++) { final int qualIndexGOP = Math.min(insertionGOP[i] + deletionGOP[i], Byte.MAX_VALUE); transition[i+1][matchToMatch] = QualityUtils.qualToProb((byte) qualIndexGOP); @@ -169,9 +174,6 @@ public final class LoglessPairHMM extends PairHMM { transition[i+1][matchToDeletion] = QualityUtils.qualToErrorProb(deletionGOP[i]); transition[i+1][deletionToDeletion] = QualityUtils.qualToErrorProb(overallGCP[i]); } - - // note that we initialized the constants - constantsAreInitialized = true; } /** diff --git a/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java new file mode 100644 index 000000000..3d8137ecf --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMMTestData.java @@ -0,0 +1,162 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. 
with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.pairhmm; + +import net.sf.samtools.SAMUtils; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.text.XReadLines; + +import java.io.*; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.zip.GZIPInputStream; + +/** + * Useful single class carrying test data for PairHMMs (for use in benchmarking and unit tests) + * + * User: depristo + * Date: 5/12/13 + * Time: 3:52 PM + * To change this template use File | Settings | File Templates. 
+ */ +public class PairHMMTestData { + public final String ref; + private final String read; + public final byte[] baseQuals, insQuals, delQuals, gcp; + public final double log10l; + + PairHMMTestData(String ref, String read, byte[] baseQuals, byte[] insQuals, byte[] delQuals, byte[] gcp, double log10l) { + this.ref = ref; + this.read = read; + this.baseQuals = baseQuals; + this.insQuals = insQuals; + this.delQuals = delQuals; + this.gcp = gcp; + this.log10l = log10l; + } + + PairHMMTestData(String ref, String read, final byte qual) { + this.ref = ref; + this.read = read; + this.baseQuals = this.insQuals = this.delQuals = Utils.dupBytes(qual, read.length()); + this.gcp = Utils.dupBytes((byte)10, read.length()); + this.log10l = -1; + } + + public double runHMM(final PairHMM hmm) { + hmm.initialize(getRead().length(), ref.length()); + return hmm.computeReadLikelihoodGivenHaplotypeLog10(ref.getBytes(), getRead().getBytes(), + baseQuals, insQuals, delQuals, gcp, true); + } + + @Override + public String toString() { + return "Info{" + + "ref='" + ref + '\'' + + ", read='" + getRead() + '\'' + + ", log10l=" + log10l + + '}'; + } + + public static void runHMMs(final PairHMM hmm, final List data, final boolean runSingly) { + if ( runSingly ) { + for ( final PairHMMTestData datum : data ) + datum.runHMM(hmm); + } else { + // running in batch mode + final PairHMMTestData first = data.get(0); + int maxHaplotypeLen = calcMaxHaplotypeLen(data); + hmm.initialize(first.getRead().length(), maxHaplotypeLen); + for ( final PairHMMTestData datum : data ) { + hmm.computeReadLikelihoodGivenHaplotypeLog10(datum.ref.getBytes(), datum.getRead().getBytes(), + datum.baseQuals, datum.insQuals, datum.delQuals, datum.gcp, false); + + } + } + } + + public static int calcMaxHaplotypeLen(final List data) { + int maxHaplotypeLen = 0; + for ( final PairHMMTestData datum : data ) + maxHaplotypeLen = Math.max(maxHaplotypeLen, datum.ref.length()); + return maxHaplotypeLen; + } + + public static Map> 
readLikelihoods(final File file) throws IOException { + final Map> results = new LinkedHashMap<>(); + + InputStream in = new FileInputStream(file); + if ( file.getName().endsWith(".gz") ) { + in = new GZIPInputStream(in); + } + + for ( final String line : new XReadLines(in) ) { + final String[] parts = line.split(" "); + final PairHMMTestData info = new PairHMMTestData( + parts[0], parts[1], + SAMUtils.fastqToPhred(parts[2]), + SAMUtils.fastqToPhred(parts[3]), + SAMUtils.fastqToPhred(parts[4]), + SAMUtils.fastqToPhred(parts[5]), + Double.parseDouble(parts[6])); + + if ( ! results.containsKey(info.read) ) { + results.put(info.read, new LinkedList()); + } + final List byHap = results.get(info.read); + byHap.add(info); + } + + return results; + } + + public String getRead() { + return read; + } +} diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java index ae6b56e19..56f7e8257 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalUtils.java @@ -70,9 +70,7 @@ import org.broadinstitute.sting.utils.sam.GATKSAMReadGroupRecord; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.ReadUtils; -import java.io.File; -import java.io.IOException; -import java.io.PrintStream; +import java.io.*; import java.util.*; /** @@ -223,6 +221,150 @@ public class RecalUtils { } } + /** + * Component used to print out csv representation of the reports that can be use to perform analysis in + * external tools. E.g. generate plots using R scripts. + *

+ * A header is always printed into the output stream (or file) when the printer is created. Then you only need + * to call {@link #print(RecalibrationReport,String) print} for each report you want to include in the csv file. + * Once finished, you close the printer calling {@link #close() close} + * + */ + private static class CsvPrinter { + + private final PrintStream ps; + private final Covariate[] covariates; + + /** + * Constructs a printer redirected to an output file. + * @param out the output file. + * @param c covariates to print out. + * @throws FileNotFoundException if the file could not be created anew. + */ + protected CsvPrinter(final File out, final Covariate ... c) + throws FileNotFoundException { + this(new FileOutputStream(out), c); + } + + /** + * Constructs a printer redirected to an output stream + * @param os the output. + * @param c covariates to print out. + */ + protected CsvPrinter(final OutputStream os, final Covariate ... c) { + covariates = c == null ? new Covariate[0] : c.clone(); + ps = new PrintStream(os); + printHeader(); + } + + /** + * Prints the header out. + *

+ * Should only be invoked at creation. + */ + protected void printHeader() { + RecalUtils.printHeader(ps); + } + + /** + * Prints out a report into the csv file. + * + * + * @param report the report to print out. + * @param mode the report associated mode. (typically ORIGINAL, RECALIBRATED + */ + public void print(final RecalibrationReport report, final String mode) { + RecalUtils.writeCSV(ps,report.getRecalibrationTables(),mode,covariates,false); + } + + /** + * Close the csv printer. + * + * No further output will be allowed or take place after calling this method. + */ + public void close() { + ps.close(); + } + + } + + /** + * Returns a csv output printer. + * + * @param out the output file. It will be overridden + * @param c list of covariates to print out. + * + * @throws FileNotFoundException if out could not be created anew. + * + * @return never null + */ + protected static CsvPrinter csvPrinter(final File out, final Covariate ... c) + throws FileNotFoundException + { + if (c == null) { + throw new IllegalArgumentException("the input covariate array cannot be null"); + } + return new CsvPrinter(out,c); + } + + /** + * Prints out a collection of reports into a file in Csv format in a way + * that can be used by R scripts (such as the plot generator script). + *

+ * The set of covariates is take as the minimum common set from all reports. + * + * @param out the output file. It will be overridden. + * @param reports map where keys are the unique 'mode' (ORIGINAL, RECALIBRATED, ...) + * of each report and the corresponding value the report itself. + * @throws FileNotFoundException if out could not be created anew. + */ + public static void generateCsv(final File out, final Map reports) + throws FileNotFoundException { + if (reports.size() == 0) { + writeCsv(out, reports, new Covariate[0]); + } else { + final Iterator rit = reports.values().iterator(); + final RecalibrationReport first = rit.next(); + final Covariate[] firstCovariates = first.getRequestedCovariates(); + final Set covariates = new LinkedHashSet<>(); + Utils.addAll(covariates,firstCovariates); + while (rit.hasNext() && covariates.size() > 0) { + final Covariate[] nextCovariates = rit.next().getRequestedCovariates(); + final Set nextCovariateNames = new LinkedHashSet(nextCovariates.length); + for (final Covariate nc : nextCovariates) { + nextCovariateNames.add(nc.getClass().getSimpleName()); + } + final Iterator cit = covariates.iterator(); + while (cit.hasNext()) { + if (!nextCovariateNames.contains(cit.next().getClass().getSimpleName())) { + cit.remove(); + } + } + } + writeCsv(out, reports, covariates.toArray(new Covariate[covariates.size()])); + } + } + + /** + * Print out a collection of reports into a file in Csv format in a way + * that can be used by R scripts (such as the plot generator script). + * + * @param out + * @param reports map where keys are the unique 'mode' (ORIGINAL, RECALIBRATED, ...) + * of each report and the corresponding value the report itself. + * @param c the covariates to print out. + * @throws FileNotFoundException if out could not be created anew. 
+ */ + private static void writeCsv(final File out, + final Map reports, final Covariate[] c) + throws FileNotFoundException { + final CsvPrinter p = csvPrinter(out,c); + for (Map.Entry e : reports.entrySet()) { + p.print(e.getValue(),e.getKey()); + } + p.close(); + } + public enum SOLID_RECAL_MODE { /** * Treat reference inserted bases as reference matching bases. Very unsafe! @@ -390,36 +532,66 @@ public class RecalUtils { report.print(outputFile); } - private static void outputRecalibrationPlot(final RecalibrationArgumentCollection RAC) { - + /** s + * Write recalibration plots into a file + * + * @param csvFile location of the intermediary file + * @param exampleReportFile where the report arguments are collected from. + * @param output result plot file name. + */ + public static void generatePlots(final File csvFile, final File exampleReportFile, final File output) { final RScriptExecutor executor = new RScriptExecutor(); + executor.setExceptOnError(true); executor.addScript(new Resource(SCRIPT_FILE, RecalUtils.class)); - executor.addArgs(RAC.RECAL_CSV_FILE.getAbsolutePath()); - executor.addArgs(RAC.RECAL_TABLE_FILE.getAbsolutePath()); - executor.addArgs(RAC.RECAL_PDF_FILE.getAbsolutePath()); + executor.addArgs(csvFile.getAbsolutePath()); + executor.addArgs(exampleReportFile.getAbsolutePath()); + executor.addArgs(output.getAbsolutePath()); + Logger.getLogger(RecalUtils.class).debug("R command line: " + executor.getApproximateCommandLine()); executor.exec(); } + private static void outputRecalibrationPlot(final File csvFile, final RecalibrationArgumentCollection RAC) { + + final RScriptExecutor executor = new RScriptExecutor(); + executor.addScript(new Resource(SCRIPT_FILE, RecalUtils.class)); + executor.addArgs(csvFile.getAbsolutePath()); + executor.addArgs(RAC.RECAL_TABLE_FILE.getAbsolutePath()); + executor.exec(); + } + + /** + * Please use {@link #generateCsv(java.io.File, java.util.Map)} and {@link #generatePlots(java.io.File, java.io.File, java.io.File)} 
instead. + * + * @deprecated + */ + @Deprecated public static void generateRecalibrationPlot(final RecalibrationArgumentCollection RAC, final RecalibrationTables original, final Covariate[] requestedCovariates) { generateRecalibrationPlot(RAC, original, null, requestedCovariates); } + /** + * Please use {@link #generateCsv(java.io.File, java.util.Map)} and {@link #generatePlots(java.io.File, java.io.File, java.io.File)} instead. + * + * @deprecated + */ + @Deprecated public static void generateRecalibrationPlot(final RecalibrationArgumentCollection RAC, final RecalibrationTables original, final RecalibrationTables recalibrated, final Covariate[] requestedCovariates) { - final PrintStream csvFile; + final PrintStream csvStream; + final File csvTempFile = null; try { - if ( RAC.RECAL_CSV_FILE == null ) { - RAC.RECAL_CSV_FILE = File.createTempFile("BQSR", ".csv"); - RAC.RECAL_CSV_FILE.deleteOnExit(); - } - csvFile = new PrintStream(RAC.RECAL_CSV_FILE); + File csvTmpFile = File.createTempFile("BQSR",".csv"); + csvTmpFile.deleteOnExit(); + csvStream = new PrintStream(csvTmpFile); } catch (IOException e) { - throw new UserException.CouldNotCreateOutputFile(RAC.RECAL_CSV_FILE, e); + throw new UserException("Could not create temporary csv file", e); } if ( recalibrated != null ) - writeCSV(csvFile, recalibrated, "RECALIBRATED", requestedCovariates, true); - writeCSV(csvFile, original, "ORIGINAL", requestedCovariates, recalibrated == null); - outputRecalibrationPlot(RAC); + writeCSV(csvStream, recalibrated, "RECALIBRATED", requestedCovariates, true); + writeCSV(csvStream, original, "ORIGINAL", requestedCovariates, recalibrated == null); + csvStream.close(); + outputRecalibrationPlot(csvTempFile, RAC); + csvTempFile.delete(); } private static void writeCSV(final PrintStream deltaTableFile, final RecalibrationTables recalibrationTables, final String recalibrationMode, final Covariate[] requestedCovariates, final boolean printHeader) { @@ -452,18 +624,7 @@ public class 
RecalUtils { // output the csv file if (printHeader) { - final List header = new LinkedList(); - header.add("ReadGroup"); - header.add("CovariateValue"); - header.add("CovariateName"); - header.add("EventType"); - header.add("Observations"); - header.add("Errors"); - header.add("EmpiricalQuality"); - header.add("AverageReportedQuality"); - header.add("Accuracy"); - header.add("Recalibration"); - deltaTableFile.println(Utils.join(",", header)); + printHeader(deltaTableFile); } final Map covariateNameMap = new HashMap(requestedCovariates.length); @@ -480,6 +641,21 @@ public class RecalUtils { } } + private static void printHeader(PrintStream out) { + final List header = new LinkedList(); + header.add("ReadGroup"); + header.add("CovariateValue"); + header.add("CovariateName"); + header.add("EventType"); + header.add("Observations"); + header.add("Errors"); + header.add("EmpiricalQuality"); + header.add("AverageReportedQuality"); + header.add("Accuracy"); + header.add("Recalibration"); + out.println(Utils.join(",", header)); + } + /* * Return an initialized nested integer array with appropriate dimensions for use with the delta tables * diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java index ea45c2abf..091b5ecf0 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/RecalibrationReport.java @@ -340,9 +340,6 @@ public class RecalibrationReport { else if (argument.equals("recalibration_report")) RAC.existingRecalibrationReport = (value == null) ? null : new File((String) value); - else if (argument.equals("plot_pdf_file")) - RAC.RECAL_PDF_FILE = (value == null) ? null : new File((String) value); - else if (argument.equals("binary_tag_name")) RAC.BINARY_TAG_NAME = (value == null) ? 
null : (String) value; @@ -369,6 +366,11 @@ public class RecalibrationReport { return RAC; } + /** + * + * @deprecated use {@link #getRequestedCovariates()} instead. + */ + @Deprecated public Covariate[] getCovariates() { return requestedCovariates; } diff --git a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java index 4fc9470f4..79ffa50a3 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java +++ b/protected/java/src/org/broadinstitute/sting/utils/recalibration/covariates/ContextCovariate.java @@ -67,6 +67,8 @@ import java.util.ArrayList; public class ContextCovariate implements StandardCovariate { private final static Logger logger = Logger.getLogger(ContextCovariate.class); + + private int mismatchesContextSize; private int indelsContextSize; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java new file mode 100644 index 000000000..fec83e1a8 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/RankSumUnitTest.java @@ -0,0 +1,151 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.gatk.walkers.compression.reducereads.*; +import org.broadinstitute.sting.gatk.walkers.compression.reducereads.BaseCounts; +import org.broadinstitute.sting.utils.MannWhitneyU; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class RankSumUnitTest { + + List distribution20, distribution30, distribution20_40; + static final int observations = 100; + + @BeforeClass + public void init() { + distribution20 = new ArrayList<>(observations); + distribution30 = new ArrayList<>(observations); + distribution20_40 = new ArrayList<>(observations); + + final int skew = 3; + makeDistribution(distribution20, 20, skew, observations); + makeDistribution(distribution30, 30, skew, observations); + makeDistribution(distribution20_40, 20, skew, observations/2); + makeDistribution(distribution20_40, 40, skew, observations/2); + + // shuffle the observations + Collections.shuffle(distribution20); + Collections.shuffle(distribution30); + Collections.shuffle(distribution20_40); + } + + 
private static void makeDistribution(final List result, final int target, final int skew, final int numObservations) { + final int rangeStart = target - skew; + final int rangeEnd = target + skew; + + int current = rangeStart; + for ( int i = 0; i < numObservations; i++ ) { + result.add(current++); + if ( current > rangeEnd ) + current = rangeStart; + } + } + + @DataProvider(name = "DistributionData") + public Object[][] makeDistributionData() { + List tests = new ArrayList(); + + for ( final int numToReduce : Arrays.asList(0, 10, 50, 100) ) { + tests.add(new Object[]{distribution20, distribution20, numToReduce, true, "20-20"}); + tests.add(new Object[]{distribution30, distribution30, numToReduce, true, "30-30"}); + tests.add(new Object[]{distribution20_40, distribution20_40, numToReduce, true, "20/40-20/40"}); + + tests.add(new Object[]{distribution20, distribution30, numToReduce, false, "20-30"}); + tests.add(new Object[]{distribution30, distribution20, numToReduce, false, "30-20"}); + + tests.add(new Object[]{distribution20, distribution20_40, numToReduce, false, "20-20/40"}); + tests.add(new Object[]{distribution30, distribution20_40, numToReduce, true, "30-20/40"}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true, dataProvider = "DistributionData") + public void testDistribution(final List distribution1, final List distribution2, final int numToReduceIn2, final boolean distributionsShouldBeEqual, final String debugString) { + final MannWhitneyU mannWhitneyU = new MannWhitneyU(true); + + for ( final Integer num : distribution1 ) + mannWhitneyU.add(num, MannWhitneyU.USet.SET1); + + final List dist2 = new ArrayList<>(distribution2); + if ( numToReduceIn2 > 0 ) { + final org.broadinstitute.sting.gatk.walkers.compression.reducereads.BaseCounts counts = new BaseCounts(); + for ( int i = 0; i < numToReduceIn2; i++ ) { + final int value = dist2.remove(0); + counts.incr(BaseIndex.A, (byte)value, 0, false); + } + + final int qual = 
(int)counts.averageQualsOfBase(BaseIndex.A); + for ( int i = 0; i < numToReduceIn2; i++ ) + dist2.add(qual); + } + + for ( final Integer num : dist2 ) + mannWhitneyU.add(num, MannWhitneyU.USet.SET2); + + final Double result = mannWhitneyU.runTwoSidedTest().second; + Assert.assertFalse(Double.isNaN(result)); + + if ( distributionsShouldBeEqual ) { + // TODO -- THIS IS THE FAILURE POINT OF USING REDUCED READS WITH RANK SUM TESTS + if ( numToReduceIn2 >= observations / 2 ) + return; + Assert.assertTrue(result > 0.1, String.format("%f %d %d", result, numToReduceIn2, dist2.get(0))); + } else { + Assert.assertTrue(result < 0.01, String.format("%f %d %d", result, numToReduceIn2, dist2.get(0))); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java index 961a28bcf..e7d7300ae 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorIntegrationTest.java @@ -78,7 +78,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testHasAnnotsAsking1() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample2.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("42889072698af972f2004ccfe8eae15e")); + Arrays.asList("823868a4b5b5ec2cdf080c059d04d31a")); executeTest("test file has annotations, asking for annotations, #1", spec); } @@ -112,7 +112,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testNoAnnotsAsking1() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard --variant " + privateTestDir + "vcfexample2empty.vcf -I " + validationDataLocation + 
"low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("7e755bb09169699b76850e76b71a5f5a")); + Arrays.asList("6f873b3152db291e18e3a04fbce2e117")); executeTest("test file doesn't have annotations, asking for annotations, #1", spec); } @@ -128,7 +128,7 @@ public class VariantAnnotatorIntegrationTest extends WalkerTest { public void testExcludeAnnotations() { WalkerTestSpec spec = new WalkerTestSpec( baseTestString() + " -G Standard -XA FisherStrand -XA ReadPosRankSumTest --variant " + privateTestDir + "vcfexample2empty.vcf -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -L 1:10,020,000-10,021,000", 1, - Arrays.asList("e17596007d0db7673d138a9ae4890e82")); + Arrays.asList("552c2ad9dbfaa85d51d2def93c8229c6")); executeTest("test exclude annotations", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java new file mode 100644 index 000000000..6d6761f1c --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotatorUnitTest.java @@ -0,0 +1,164 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.variant.vcf.VCFConstants; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.*; + +public class VariantOverlapAnnotatorUnitTest extends BaseTest { + private GenomeLocParser genomeLocParser; + private IndexedFastaSequenceFile seq; + + @BeforeClass + public void setup() throws FileNotFoundException { + // sequence + seq = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + genomeLocParser = new GenomeLocParser(seq); + } + + private VariantContext makeVC(final String source, final String id, final List 
alleles) { + final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(source, "20", 10, alleles); + return new VariantContextBuilder(vc).id(id).make(); + } + + private VariantOverlapAnnotator makeAnnotator(final String dbSNP, final String ... overlaps) { + final RodBinding dbSNPBinding = dbSNP == null ? null : new RodBinding<>(VariantContext.class, dbSNP); + final Map, String> overlapBinding = new LinkedHashMap<>(); + for ( final String overlap : overlaps ) overlapBinding.put(new RodBinding<>(VariantContext.class, overlap), overlap); + return new VariantOverlapAnnotator(dbSNPBinding, overlapBinding, genomeLocParser); + } + + @Test + public void testCreateWithSpecialNames() { + final List names = Arrays.asList("X", "Y", "Z"); + final Map, String> overlapBinding = new LinkedHashMap<>(); + for ( final String overlap : names ) overlapBinding.put(new RodBinding<>(VariantContext.class, overlap + "Binding"), overlap); + final VariantOverlapAnnotator annotator = new VariantOverlapAnnotator(null, overlapBinding, genomeLocParser); + Assert.assertEquals(annotator.getOverlapNames(), names); + } + + @DataProvider(name = "AnnotateRsIDData") + public Object[][] makeAnnotateRsIDData() { + List tests = new ArrayList<>(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final VariantContext callNoIDAC = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "C")); + final VariantContext callNoIDAT = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "T")); + final VariantContext callIDAC = makeVC("call", "foo", Arrays.asList("A", "C")); + final VariantContext callExistingIDAC = makeVC("call", "rsID1", Arrays.asList("A", "C")); + + final VariantContext dbSNP_AC = makeVC("DBSNP", "rsID1", Arrays.asList("A", "C")); + final VariantContext dbSNP_AT = makeVC("DBSNP", "rsID2", Arrays.asList("A", "T")); + final VariantContext dbSNP_AG = makeVC("DBSNP", "rsID3", Arrays.asList("A", "G")); + final 
VariantContext dbSNP_AC_AT = makeVC("DBSNP", "rsID1;rsID2", Arrays.asList("A", "C", "T")); + final VariantContext dbSNP_AC_AG = makeVC("DBSNP", "rsID1;rsID3", Arrays.asList("A", "C", "G")); + + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC), dbSNP_AC.getID(), true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AT), VCFConstants.EMPTY_ID_FIELD, false}); + tests.add(new Object[]{callIDAC, Arrays.asList(dbSNP_AC), "foo" + ";" + dbSNP_AC.getID(), true}); + tests.add(new Object[]{callIDAC, Arrays.asList(dbSNP_AT), "foo", false}); + tests.add(new Object[]{callExistingIDAC, Arrays.asList(dbSNP_AC), "rsID1", true}); + tests.add(new Object[]{callExistingIDAC, Arrays.asList(dbSNP_AT), "rsID1", false}); + + final VariantContext callNoIDACT = makeVC("call", VCFConstants.EMPTY_ID_FIELD, Arrays.asList("A", "C", "T")); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC), dbSNP_AC.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AT), dbSNP_AT.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AG), VCFConstants.EMPTY_ID_FIELD, false}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC_AT), dbSNP_AC_AT.getID(), true}); + tests.add(new Object[]{callNoIDACT, Arrays.asList(dbSNP_AC_AG), dbSNP_AC_AG.getID(), true}); + + // multiple options + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC, dbSNP_AT), "rsID1", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AT, dbSNP_AC), "rsID1", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_AT), "rsID1;rsID2", true}); + tests.add(new Object[]{callNoIDAT, Arrays.asList(dbSNP_AC_AT), "rsID1;rsID2", true}); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_AG), "rsID1;rsID3", true}); + tests.add(new Object[]{callNoIDAT, Arrays.asList(dbSNP_AC_AG), VCFConstants.EMPTY_ID_FIELD, false}); + + final VariantContext dbSNP_AC_FAIL = new VariantContextBuilder(makeVC("DBSNP", "rsID1", Arrays.asList("A", 
"C"))).filter("FAIL").make(); + tests.add(new Object[]{callNoIDAC, Arrays.asList(dbSNP_AC_FAIL), VCFConstants.EMPTY_ID_FIELD, false}); + + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AnnotateRsIDData") + public void testAnnotateRsID(final VariantContext toAnnotate, final List dbSNPRecords, final String expectedID, final boolean expectOverlap) throws Exception { + final VariantOverlapAnnotator annotator = makeAnnotator("dbnsp"); + final VariantContext annotated = annotator.annotateRsID(dbSNPRecords, toAnnotate); + Assert.assertNotNull(annotated); + Assert.assertEquals(annotated.getID(), expectedID); + } + + @Test(dataProvider = "AnnotateRsIDData") + public void testAnnotateOverlaps(final VariantContext toAnnotate, final List records, final String expectedID, final boolean expectOverlap) throws Exception { + final String name = "binding"; + final VariantOverlapAnnotator annotator = makeAnnotator(null, name); + final VariantContext annotated = annotator.annotateOverlap(records, name, toAnnotate); + Assert.assertNotNull(annotated); + Assert.assertEquals(annotated.getID(), toAnnotate.getID(), "Shouldn't modify annotation"); + Assert.assertEquals(annotated.hasAttribute(name), expectOverlap); + if ( expectOverlap ) { + Assert.assertEquals(annotated.getAttribute(name), true); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java index 69a5fc65f..5601d66fb 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/beagle/BeagleIntegrationTest.java @@ -62,7 +62,7 @@ public class BeagleIntegrationTest extends WalkerTest { "--beagleR2:BEAGLE " + beagleValidationDataLocation + "inttestbgl.r2 " + "--beagleProbs:BEAGLE " + beagleValidationDataLocation + "inttestbgl.gprobs " + 
"--beaglePhased:BEAGLE " + beagleValidationDataLocation + "inttestbgl.phased " + - "-o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING", 1, Arrays.asList("c5522304abf0633041c7772dd7dafcea")); + "-o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING", 1, Arrays.asList("989449fa3e262b88ba126867fa3ad9fb")); spec.disableShadowBCF(); executeTest("test BeagleOutputToVCF", spec); } @@ -96,7 +96,7 @@ public class BeagleIntegrationTest extends WalkerTest { "--beagleR2:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.r2 "+ "--beagleProbs:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.gprobs.bgl "+ "--beaglePhased:beagle /humgen/gsa-hpprojects/GATK/data/Validation_Data/EUR_beagle_in_test.phased.bgl "+ - "-L 20:1-70000 -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",1,Arrays.asList("d8906b67c7f9fdb5b37b8e9e050982d3")); + "-L 20:1-70000 -o %s --no_cmdline_in_header -U LENIENT_VCF_PROCESSING",1,Arrays.asList("e036636fcd6a748ede4a70ea47941d47")); spec.disableShadowBCF(); executeTest("testBeagleChangesSitesToRef",spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java new file mode 100644 index 000000000..95ce80848 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/AnalyzeCovariatesIntegrationTest.java @@ -0,0 +1,362 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ +package org.broadinstitute.sting.gatk.walkers.bqsr; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Method; +import java.util.*; + +import static org.testng.Assert.assertTrue; + +/** + * Tests Analyze Covariates. + *

+ * Notice that since PDF report generated by R are different every-time this program + * is executed their content won't be tested. It only will verify that file has a healthy size. + * + */ +public class AnalyzeCovariatesIntegrationTest extends WalkerTest { + + private static final String TOOL_NAME = AnalyzeCovariates.class.getSimpleName(); + + /** + * Directory where the testdata is located. + */ + private static final File TEST_DATA_DIR = new File(privateTestDir,"AnalyzeCovariates"); + + /** + * File containing the before report for normal testing. + */ + private static final File BEFORE_FILE = new File(TEST_DATA_DIR,"before.table"); + + /** + * File containing the after report for normal testing. + */ + private static final File AFTER_FILE = new File(TEST_DATA_DIR,"after.table"); + + + /** + * File containing the bqsr report for normal testing. + */ + private static final File BQSR_FILE = new File(TEST_DATA_DIR,"bqsr.table"); + + /** + * Test the content of the generated csv file. + * + * @throws IOException should never happen. It would be an indicator of a + * problem with the testing environment. + */ + @Test(enabled = true) + public void testCsvGeneration() + throws IOException { + + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine("%s",null,true,true,true), + Collections.singletonList("106709d32e6f0a0a9dd6a6340ec246ab")); + executeTest("testCsvGeneration",spec); + } + + + /** + * Test the size of the generated pdf. + *

+ * Unfortunately we cannot test the content as it changes slightly + * every time the tool is run. + * + * @throws IOException should never happen. It would be an + * indicator of a problem with the testing environment. + */ + @Test(enabled = true) + public void testPdfGeneration() + throws IOException { + final File pdfFile = File.createTempFile("ACTest",".pdf"); + pdfFile.delete(); + pdfFile.deleteOnExit(); + + final List md5 = Collections.emptyList(); + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine(null,pdfFile.toString(),true,true,true),md5); + executeTest("testPdfGeneration",spec); + assertTrue(pdfFile.exists(),"the pdf file was not created"); + assertTrue(pdfFile.length() > 260000,"the pdf file size does" + + " not reach the minimum of 260Kb"); + } + + /** + * Test the effect of changing some recalibration parameters. + * @param afterFileName name of the alternative after recalibration file. + * @param description describes what has been changed. + * @throws IOException should never happen. It would be an + * indicator of a problem with the testing environment. + */ + @Test(enabled = true, dataProvider="alternativeAfterFileProvider") + public void testParameterChangeException(final String afterFileName, + final String description) + throws IOException { + + final File pdfFile = File.createTempFile("ACTest",".pdf"); + pdfFile.deleteOnExit(); + final List md5 = Collections.emptyList(); + final File afterFile = new File(TEST_DATA_DIR,afterFileName); + final WalkerTestSpec spec = new WalkerTestSpec( + buildCommandLine(null,"%s",true,true,afterFile), + 1,UserException.IncompatibleRecalibrationTableParameters.class); + executeTest("testParameterChangeException - " + description, spec); + } + + + /** + * Test combinations of input and output inclusion exclusion of the command + * line that cause an exception to be thrown. + * + * @param useCsvFile whether to include the output csv file. 
+ * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @throws IOException never thrown, unless there is a problem with the testing environment. + */ + @Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations") + public void testInOutAbsenceException(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile, + useBQSRFile,useBeforeFile,useAfterFile),0,UserException.class); + executeTest("testInOutAbsencePresenceException", spec); + } + + /** + * Test combinations of input and output inclusion exclusion of the + * command line that won't cause an exception. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @throws IOException never thrown, unless there is a problem with the testing environment. 
+ */ + @Test(enabled = true, dataProvider="alternativeInOutAbsenceCombinations") + public void testInOutAbsence(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + final List md5 = Collections.emptyList(); + final WalkerTestSpec spec = new WalkerTestSpec(buildCommandLine(useCsvFile,usePdfFile, + useBQSRFile,useBeforeFile,useAfterFile),md5); + executeTest("testInOutAbsencePresence", spec); + } + + + + @DataProvider + public Iterator alternativeInOutAbsenceCombinations(Method m) { + List result = new LinkedList(); + if (m.getName().endsWith("Exception")) { + result.add(new Object[] { false, false, true, true, true }); + result.add(new Object[] { true, true, false, false ,false}); + } + else { + result.add(new Object[] { true, true, true, false, false }); + result.add(new Object[] { true, true, false, true, false }); + result.add(new Object[] { true, true, false, false, true }); + result.add(new Object[] { true, false,false, true, false }); + result.add(new Object[] { false, true, true, false, false }); + + } + return result.iterator(); + } + + /** + * Provide recalibration parameter change data to relevant tests. + * @param m target test method. + * @return never null. 
+ */ + @DataProvider + public Iterator alternativeAfterFileProvider (Method m) { + final boolean expectsException = m.getName().endsWith("Exception"); + final List result = new LinkedList(); + for (final Object[] data : DIFFERENT_PARAMETERS_AFTER_FILES) { + if (data[1].equals(expectsException)) { + result.add(new Object[] { data[0], data[2] }); + } + } + return result.iterator(); + } + + /** + * Triplets < alfter-grp-file, whether it should fail, what is different > + */ + private final Object[][] DIFFERENT_PARAMETERS_AFTER_FILES = { + {"after-cov.table", true, "Adds additional covariate: repeat-length" }, + {"after-dpSOLID.table", true, "Change the default platform to SOLID" }, + {"after-noDp.table",true, "Unset the default platform" }, + {"after-mcs4.table", true, "Changed -mcs parameter from 2 to 4" } + }; + + /** + * Build the AC command line given what combinations of input and output files should be included. + * + * @param useCsvFile whether to include the output csv file. + * @param usePdfFile whether to include the output pdf file. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * @return never null. + * @throws IOException never thrown, unless there is a problem with the testing environment. + */ + private String buildCommandLine(final boolean useCsvFile, final boolean usePdfFile, + final boolean useBQSRFile, final boolean useBeforeFile, final boolean useAfterFile) + throws IOException { + + final File csvFile = useCsvFile ? File.createTempFile("ACTest",".csv") : null; + final File pdfFile = usePdfFile ? File.createTempFile("ACTest",".pdf") : null; + + if (csvFile != null) { + csvFile.deleteOnExit(); + } + + if (pdfFile != null) { + pdfFile.deleteOnExit(); + } + + return buildCommandLine(csvFile == null ? null : csvFile.toString(), + pdfFile == null ? 
null : pdfFile.toString(), + useBQSRFile,useBeforeFile,useAfterFile); + } + + /** + * Build the AC command line given the output file names explicitly and what test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param useAfterFile whether to include the -after input file. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile, + final boolean useBeforeFile, final boolean useAfterFile) { + return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null, + useBeforeFile ? BEFORE_FILE : null, + useAfterFile ? AFTER_FILE : null); + } + + /** + * Build the AC command line given the output file names and the after file name explicitly and what other + * test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param useBQSRFile whether to include the -BQSR input file. + * @param useBeforeFile whether to include the -before input file. + * @param afterFile the after input report file, null if none should be provided. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final boolean useBQSRFile, + final boolean useBeforeFile, final File afterFile) { + return buildCommandLine(csvFileName,pdfFileName,useBQSRFile ? BQSR_FILE : null, + useBeforeFile ? BEFORE_FILE : null, + afterFile); + } + + /** + * Build the AC command line given the output file names and the after file name explicitly and what other + * test input files to use. + *

+ * + * @param csvFileName the csv output file, null if none should be provided. + * @param pdfFileName the plots output file, null if none should be provided. + * @param bqsrFile the BQSR input report file, null if none should be provided. + * @param beforeFile the before input report file, null if non should be provided. + * @param afterFile the after input report file, null if none should be provided. + * + * @return never null. + */ + private String buildCommandLine(final String csvFileName, final String pdfFileName, final File bqsrFile, + final File beforeFile, final File afterFile) { + + final List args = new LinkedList(); + args.add("-T"); + args.add(TOOL_NAME); + args.add("-R"); + args.add(hg19Reference); + args.add("-ignoreLMT"); + + if (csvFileName != null) { + args.add("-" + AnalyzeCovariates.CSV_ARG_SHORT_NAME); + args.add("'" + csvFileName + "'"); + } + if (pdfFileName != null) { + args.add("-" + AnalyzeCovariates.PDF_ARG_SHORT_NAME); + args.add("'" + pdfFileName + "'"); + } + if (bqsrFile != null) { + args.add("-BQSR"); + args.add("'" + bqsrFile.getAbsoluteFile().toString() + "'"); + } + if (beforeFile != null) { + args.add("-" + AnalyzeCovariates.BEFORE_ARG_SHORT_NAME); + args.add("'" + beforeFile.getAbsolutePath().toString() + "'"); + } + if (afterFile != null) { + args.add("-" + AnalyzeCovariates.AFTER_ARG_SHORT_NAME); + args.add("'" + afterFile.getAbsolutePath().toString() + "'"); + } + return Utils.join(" ", args); + + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java index 907046704..05183a521 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/bqsr/BQSRIntegrationTest.java @@ -100,23 +100,23 @@ public class BQSRIntegrationTest extends WalkerTest { @DataProvider(name = "BQSRTest") public 
Object[][] createBQSRTestData() { return new Object[][]{ - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, "", "61fd466b5e94d2d67e116f6f67c9f939")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov ContextCovariate", "e08b5bcdb64f4beea03730e5631a14ca")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov CycleCovariate", "448a45dc154c95d1387cb5cdddb67071")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --indels_context_size 4", "c1e7999e445d51bbe2e775dac5325643")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --low_quality_tail 5", "a57c16918cdfe12d55a89c21bf195279")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --quantizing_levels 6", "836dccacf48ccda6b2843d07e8f1ef4d")}, - {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --mismatches_context_size 4", "0fb2aedc2f8d66b5821cb570f15a8c4d")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", "", "c9953f020a65c1603a6d71aeeb1b95f3")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "85a120b7d86b61597b86b9e93decbdfc")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "5248dc49aec0323c74b496bb4928c73c")}, - {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", "cb52f267e0010f849f50b0bf1de474a1")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "1425a5063ee757dbfc013df24e65a67a")}, - {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "c1c3cda8caceed619d3d439c3990cd26")}, - {new 
BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "c9953f020a65c1603a6d71aeeb1b95f3")}, - {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "5bfff0c699345cca12a9b33acf95588f")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, "", "f805a0020eea987b79f314fa99913806")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov ContextCovariate", "86075d3856eb06816a0dd81af55e421f")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --no_standard_covs -cov CycleCovariate", "155802237e1fc7a001398b8f4bcf4b72")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --indels_context_size 4", "38c7916cc019fe8d134df67639422b42")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --low_quality_tail 5", "b74e75f3c5aa90bd21af1e20f2ac8c40")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --quantizing_levels 6", "e564505aea11464de8ed72890d9ea89a")}, + {new BQSRTest(hg18Reference, HiSeqBam, HiSeqInterval, " --mismatches_context_size 4", "380d8be121ffaddd3461ee0ac3d1a76f")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", "", "0b5a8e259e997e4c7b5836d4c28e6f4d")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-10,200,000", "", "281682124584ab384f23359934df0c3b")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.1RG.bam", "1:10,000,000-10,200,000", "", "0a92fdff5fd26227c29d34eda5a32f49")}, + {new BQSRTest(b36KGReference, validationDataLocation + "originalQuals.1kg.chr1.1-1K.1RG.bam", "1:1-1,000", " -OQ", 
"90d8c24077e8ae9a0037a9aad5f09e31")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA19240.chr1.BFAST.SOLID.bam", "1:10,000,000-20,000,000", " --solid_recal_mode REMOVE_REF_BIAS", "c41ef02c640ef1fed4bfc03b9b33b616")}, + {new BQSRTest(b36KGReference, privateTestDir + "NA19240.chr1.BFAST.SOLID.hasCSNoCall.bam", "1:50,000-80,000", " --solid_nocall_strategy LEAVE_READ_UNRECALIBRATED", "b577cd1d529425f66db49620db09fdca")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:anyNameABCD,VCF " + privateTestDir + "vcfexample3.vcf", "0b5a8e259e997e4c7b5836d4c28e6f4d")}, + {new BQSRTest(b36KGReference, validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.1Mb.1RG.bam", "1:10,000,000-10,200,000", " -knownSites:bed " + validationDataLocation + "bqsrKnownTest.bed", "9ad49269c0156f8ab1173261bf23e600")}, // make sure we work with ION torrent bam - {new BQSRTest(b37KGReference, privateTestDir + "iontorrent.bam", "20:10,000,000-10,200,000", "", "7375c7b692e76b651c278a9fb478fa1c")}, + {new BQSRTest(b37KGReference, privateTestDir + "iontorrent.bam", "20:10,000,000-10,200,000", "", "04bfa4760767022e7f5252e6e4432cc1")}, }; } @@ -141,22 +141,6 @@ public class BQSRIntegrationTest extends WalkerTest { executeTest("testBQSRFailWithoutDBSNP", spec); } - @Test - public void testBQSRCSV() { - WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( - " -T BaseRecalibrator" + - " -R " + b36KGReference + - " -I " + validationDataLocation + "NA12892.SLX.SRP000031.2009_06.selected.bam" + - " -knownSites " + b36dbSNP129 + - " -L 1:10,000,000-10,200,000" + - " -o /dev/null" + - " -sortAllCols" + - " --plot_pdf_file /dev/null" + - " --intermediate_csv_file %s", - Arrays.asList("90ad19143024684e3c4410dc8fd2bd9d")); - executeTest("testBQSR-CSVfile", spec); - } - @Test public void testBQSRFailWithSolidNoCall() { WalkerTest.WalkerTestSpec spec = new 
WalkerTest.WalkerTestSpec( diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java index 405e616f1..4fbbe1d0c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/ReduceReadsIntegrationTest.java @@ -260,7 +260,7 @@ public class ReduceReadsIntegrationTest extends WalkerTest { public void testDivideByZero() { String base = String.format("-T ReduceReads %s -npt -R %s -I %s", DIVIDEBYZERO_L, REF, DIVIDEBYZERO_BAM) + " -o %s "; // we expect to lose coverage due to the downsampling so don't run the systematic tests - executeTestWithoutAdditionalRRTests("testDivideByZero", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("c459a6153a17c2cbf8441e1918fda9c8"))); + executeTestWithoutAdditionalRRTests("testDivideByZero", new WalkerTestSpec(base, Arrays.asList("bam"), Arrays.asList("4f0ef477c0417d1eb602b323474ef377"))); } /** diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java index 56ad02084..bd0a8933c 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SlidingWindowUnitTest.java @@ -89,6 +89,38 @@ public class SlidingWindowUnitTest extends BaseTest { return variantRegionBitset; } + ////////////////////////////////////////////////////////////////////////////////////// + //// Test for leading softclips immediately followed by an insertion in the CIGAR //// + 
////////////////////////////////////////////////////////////////////////////////////// + + @Test(enabled = true) + public void testLeadingSoftClipThenInsertion() { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 10); + read.setReadBases(Utils.dupBytes((byte) 'A', 10)); + read.setBaseQualities(Utils.dupBytes((byte)30, 10)); + read.setMappingQuality(30); + read.setCigarString("2S2I6M"); + + final SlidingWindow slidingWindow = new SlidingWindow("1", 0, 1); + slidingWindow.addRead(read); + slidingWindow.close(null); + } + + @Test(enabled = true) + public void testLeadingHardClipThenInsertion() { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 8); + read.setReadBases(Utils.dupBytes((byte) 'A', 8)); + read.setBaseQualities(Utils.dupBytes((byte)30, 8)); + read.setMappingQuality(30); + read.setCigarString("2H2I6M"); + + final SlidingWindow slidingWindow = new SlidingWindow("1", 0, 10, header, new GATKSAMReadGroupRecord("test"), 0, 0.05, 0.05, 0.05, 20, 20, 100, ReduceReads.DownsampleStrategy.Normal, false); + slidingWindow.addRead(read); + slidingWindow.close(null); + } + ////////////////////////////////////////////////////////////////////////////////////// //// This section tests the findVariantRegions() method and related functionality //// ////////////////////////////////////////////////////////////////////////////////////// @@ -221,6 +253,33 @@ public class SlidingWindowUnitTest extends BaseTest { return count; } + @Test(enabled = true) + public void testMarkingRegionInCancerMode() { + + final int contextSize = 10; + final SlidingWindow slidingWindow = new SlidingWindow("1", 0, contextSize, header, new GATKSAMReadGroupRecord("test"), 0, 0.05, 0.05, 0.05, 20, 20, 100, ReduceReads.DownsampleStrategy.Normal, false); + slidingWindow.addRead(createSimpleRead("1", 0, 34, 75)); + slidingWindow.addRead(createSimpleRead("2", 0, 97, 73)); + slidingWindow.addRead(createSimpleRead("3", 0, 98, 
75)); + slidingWindow.addRead(createSimpleRead("4", 0, 98, 75)); + slidingWindow.addRead(createSimpleRead("5", 0, 98, 75)); + + final CompressionStash regions = new CompressionStash(); + regions.add(new FinishedGenomeLoc("1", 0, 89, 109, true)); + + slidingWindow.closeVariantRegions(regions, null, false); + Assert.assertEquals(slidingWindow.getMarkedSitesForTesting().getVariantSiteBitSet().length, 76 + contextSize); + } + + private GATKSAMRecord createSimpleRead(final String name, final int refIndex, final int alignmentStart, final int length) { + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, name, refIndex, alignmentStart, length); + read.setReadBases(Utils.dupBytes((byte) 'A', length)); + read.setBaseQualities(Utils.dupBytes((byte) 30, length)); + read.setMappingQuality(60); + return read; + } + ///////////////////////////////////////////////////////////////// //// This section tests the consensus creation functionality //// diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java index 88506fda3..aaa3b1284 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite1IntegrationTest.java @@ -74,11 +74,11 @@ public class UnifiedGenotyperGeneralPloidySuite1IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_GGA_Pools() { - executor.PC_LSV_Test(String.format(" -maxAltAlleles 1 -ploidy 24 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_INDEL_GGA", "INDEL", "3f7d763c654f1d708323f369ea4a099b"); + executor.PC_LSV_Test(String.format(" -maxAltAlleles 1 -ploidy 24 -gt_mode 
GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles %s", LSV_ALLELES), "LSV_INDEL_GGA", "INDEL", "ceb105e3db0f2b993e3d725b0d60b6a3"); } @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy1_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "66a5a3eb657fac5c621bc0c228ea9caf"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 1", "LSV_INDEL_DISC_NOREF_p1", "INDEL", "98f4d78aad745c6e853b81b2e4e207b4"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java index 64568d714..0eb89adc7 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperGeneralPloidySuite2IntegrationTest.java @@ -58,7 +58,7 @@ public class UnifiedGenotyperGeneralPloidySuite2IntegrationTest extends WalkerTe @Test(enabled = true) public void testINDEL_maxAltAlleles2_ploidy3_Pools_noRef() { - executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","5eabc12fc7b4f9749e6d1be0f5b45d14"); + executor.PC_LSV_Test_NoRef(" -maxAltAlleles 2 -ploidy 3","LSV_INDEL_DISC_NOREF_p3","INDEL","25902d7a6a0c00c60c2d5845dfaa1a4c"); } @Test(enabled = true) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java index 856e97ebe..65a569cdc 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java +++ 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIndelCallingIntegrationTest.java @@ -73,7 +73,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("19f77f557150905ef3fa4713f611a1b9")); + Arrays.asList("ef8151aa699da3272c1ae0986d16ca21")); executeTest(String.format("test indel caller in SLX"), spec); } @@ -88,7 +88,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -minIndelCnt 1" + " -L 1:10,000,000-10,100,000", 1, - Arrays.asList("d9572a227ccb13a6baa6dc4fb65bc1e5")); + Arrays.asList("7f88229ccefb74513efb199b61183cb8")); executeTest(String.format("test indel caller in SLX with low min allele count"), spec); } @@ -101,7 +101,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { " -o %s" + " -L 1:10,000,000-10,500,000", 1, - Arrays.asList("bb3dbad9666ebf38d338f0c9c211a42e")); + Arrays.asList("1928ad48bcd0ca180e046bc235cfb3f4")); executeTest(String.format("test indel calling, multiple technologies"), spec); } @@ -111,7 +111,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s -L 20:10,000,000-10,100,000", 1, - Arrays.asList("8052390ca2b6a57c3ddf379a51225d64")); + Arrays.asList("6663e434a7b549f23bfd52db90e53a1a")); executeTest("test MultiSample Pilot2 indels with alleles passed in", spec); } @@ -121,7 +121,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { baseCommandIndels + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "indelAllelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o %s 
-L 20:10,000,000-10,100,000", 1, - Arrays.asList("b6b9dba97fbabaeeb458a41051983e7b")); + Arrays.asList("581c552664e536df6d0f102fb0d10e5a")); executeTest("test MultiSample Pilot2 indels with alleles passed in and emitting all sites", spec); } @@ -136,7 +136,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L " + result.get(0).getAbsolutePath(), 1, - Arrays.asList("38730c7030271f5d0ca0b59365d57814")); + Arrays.asList("5596851d19582dd1af3901b7d703ae0a")); executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2); } @@ -176,7 +176,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { public void testMinIndelFraction0() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.0", 1, - Arrays.asList("264325878b988acc11d8e5d9d2ba0b7f")); + Arrays.asList("862d82c8aa35f1da4f9e67b5b48dfe52")); executeTest("test minIndelFraction 0.0", spec); } @@ -184,7 +184,7 @@ public class UnifiedGenotyperIndelCallingIntegrationTest extends WalkerTest { public void testMinIndelFraction25() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( assessMinIndelFraction + " -minIndelFrac 0.25", 1, - Arrays.asList("98abcfb0a008050eba8b9c285a25b2a0")); + Arrays.asList("8d9fc96be07db791737ac18135de4d63")); executeTest("test minIndelFraction 0.25", spec); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java index d55a923dc..532982853 100644 --- 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperIntegrationTest.java @@ -46,11 +46,15 @@ package org.broadinstitute.sting.gatk.walkers.genotyper; +import net.sf.samtools.util.BlockCompressedInputStream; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.Assert; import org.testng.annotations.Test; +import java.io.File; import java.util.Arrays; import java.util.Collections; @@ -156,6 +160,14 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { } + @Test + public void emitPLsAtAllSites() { + WalkerTest.WalkerTestSpec spec1 = new WalkerTest.WalkerTestSpec( + baseCommand + " -I " + validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.SLX.bam -o %s -L 1:10,000,000-10,010,000 --output_mode EMIT_ALL_SITES -allSitePLs", 1, + Arrays.asList("7cc55db8693759e059a05bc4398f6f69")); + executeTest("test all site PLs 1", spec1); + + } // -------------------------------------------------------------------------------------------------------------- // // testing heterozygosity @@ -288,9 +300,24 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest { @Test public void testNsInCigar() { - WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "testWithNs.bam -o %s -L 8:141813600-141813700 -out_mode EMIT_ALL_SITES", 1, - Arrays.asList("2ae3fd39c53a6954d32faed8703adfe8")); + UserException.UnsupportedCigarOperatorException.class); + executeTest("test calling on reads with Ns in CIGAR", spec); } + + @Test(enabled = true) + public void 
testCompressedVCFOutputWithNT() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T UnifiedGenotyper -R " + b37KGReference + " -I " + + privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam" + + " -o %s -L 20:10,000,000-10,100,000 -nt 4", + 1, Arrays.asList("vcf.gz"), Arrays.asList("")); + final File vcf = executeTest("testCompressedVCFOutputWithNT", spec).first.get(0); + final AsciiLineReader reader = new AsciiLineReader(new BlockCompressedInputStream(vcf)); + int nLines = 0; + while ( reader.readLine() != null ) + nLines++; + Assert.assertTrue(nLines > 0); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java index 907af0f34..1bfbbac17 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperNormalCallingIntegrationTest.java @@ -64,7 +64,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultiSamplePilot1() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( baseCommand + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10,022,000-10,025,000", 1, - Arrays.asList("5e8f1fa88dc93320cc0e75e9fe6e153b")); + Arrays.asList("a9466c1e3ce1fc4bac83086b25a6df54")); executeTest("test MultiSample Pilot1", spec); } @@ -80,7 +80,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testWithAllelesPassedIn2() { WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec( baseCommand + " --output_mode EMIT_ALL_SITES --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + privateTestDir + "allelesForUG.vcf -I " + validationDataLocation + "pilot2_daughters.chr20.10k-11k.bam -o 
%s -L 20:10,000,000-10,025,000", 1, - Arrays.asList("698e54aeae3130779d246b9480a4052c")); + Arrays.asList("3e646003c5b93da80c7d8e5d0ff2ee4e")); executeTest("test MultiSample Pilot2 with alleles passed in and emitting all sites", spec2); } @@ -96,7 +96,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMultipleSNPAlleles() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1, - Arrays.asList("1ab95513a3abb5b760578831c61ef94b")); + Arrays.asList("06c85e8eab08b67244cf38fc785aca22")); executeTest("test Multiple SNP alleles", spec); } @@ -112,7 +112,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testReverseTrim() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam -o %s -L 20:10289124 -L 20:10090289", 1, - Arrays.asList("314b99eb146de1fdafed872ecbe1cfc2")); + Arrays.asList("f3da1ff1e49a831af055ca52d6d07dd7")); executeTest("test reverse trim", spec); } @@ -120,7 +120,7 @@ public class UnifiedGenotyperNormalCallingIntegrationTest extends WalkerTest{ public void testMismatchedPLs() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + privateTestDir + "mismatchedPLs.bam -o %s -L 1:24020341", 1, - Arrays.asList("94bfccbd06043e90ae1b1c66fc3afe07")); + Arrays.asList("20ff311f363c51b7385a76f6f296759c")); executeTest("test mismatched PLs", spec); } } diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java index 5f9667cca..33810e255 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/genotyper/UnifiedGenotyperReducedReadsIntegrationTest.java @@ -63,18 +63,18 @@ public class UnifiedGenotyperReducedReadsIntegrationTest extends WalkerTest { public void testReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T UnifiedGenotyper --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("e6565060b44a7804935973efcd56e596")); + Arrays.asList("ffde0d5e23523e4bd9e7e18f62d37d0f")); executeTest("test calling on a ReducedRead BAM", spec); } @Test public void testReducedBamSNPs() { - testReducedCalling("SNP", "ab776d74c41ce2b859e2b2466a76204a"); + testReducedCalling("SNP", "e8de8c523751ad2fa2ee20185ba5dea7"); } @Test public void testReducedBamINDELs() { - testReducedCalling("INDEL", "19bc6a74250ec19efc4e1b4ee6515ac0"); + testReducedCalling("INDEL", "4b4902327fb132f9aaab3dd5ace934e1"); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java index c5574577d..95592241d 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java @@ -73,8 +73,8 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { public void 
testReferenceCycleGraph() { String refCycle = "ATCGAGGAGAGCGCCCCGAGATATATATATATATATTTGCGAGCGCGAGCGTTTTAAAAATTTTAGACGGAGAGATATATATATATGGGAGAGGGGATATATATATATCCCCCC"; String noCycle = "ATCGAGGAGAGCGCCCCGAGATATTATTTGCGAGCGCGAGCGTTTTAAAAATTTTAGACGGAGAGATGGGAGAGGGGATATATAATATCCCCCC"; - final DeBruijnGraph g1 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(refCycle.getBytes(), true)); - final DeBruijnGraph g2 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(noCycle.getBytes(), true)); + final DeBruijnGraph g1 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(refCycle.getBytes(), true), Collections.emptyList()); + final DeBruijnGraph g2 = new DeBruijnAssembler().createGraphFromSequences(new ArrayList(), 10, new Haplotype(noCycle.getBytes(), true), Collections.emptyList()); Assert.assertTrue(g1 == null, "Reference cycle graph should return null during creation."); Assert.assertTrue(g2 != null, "Reference non-cycle graph should not return null during creation."); @@ -153,4 +153,47 @@ public class DeBruijnAssemblerUnitTest extends BaseTest { Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); } } + + @DataProvider(name = "AddGGAKmersToGraph") + public Object[][] makeAddGGAKmersToGraphData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final String bases = "ACGTAACCGGTTAAACCCGGGTTT"; + final int readLen = bases.length(); + final List allBadStarts = new ArrayList(readLen); + for ( int i = 0; i < readLen; i++ ) allBadStarts.add(i); + + for ( final int kmerSize : Arrays.asList(3, 4, 5) ) { + tests.add(new Object[]{bases, kmerSize}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "AddGGAKmersToGraph", enabled = ! 
DEBUG) + public void testAddGGAKmersToGraph(final String bases, final int kmerSize) { + final int readLen = bases.length(); + final DeBruijnAssembler assembler = new DeBruijnAssembler(); + final MockBuilder builder = new MockBuilder(kmerSize); + + final Set expectedBases = new HashSet(); + final Set expectedStarts = new LinkedHashSet(); + for ( int i = 0; i < readLen; i++) { + boolean good = true; + for ( int j = 0; j < kmerSize + 1; j++ ) { // +1 is for pairing + good &= i + j < readLen; + } + if ( good ) { + expectedStarts.add(i); + expectedBases.add(bases.substring(i, i + kmerSize + 1)); + } + } + + assembler.addGGAKmersToGraph(builder, Arrays.asList(new Haplotype(bases.getBytes()))); + Assert.assertEquals(builder.addedPairs.size(), expectedStarts.size()); + for ( final Kmer addedKmer : builder.addedPairs ) { + Assert.assertTrue(expectedBases.contains(new String(addedKmer.bases())), "Couldn't find kmer " + addedKmer + " among all expected kmers " + expectedBases); + } + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index d6c6a4f33..0636d7c1b 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex1() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "fc11b553fbf16beac0da04a69f419365"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "4a3479fc4ad387d381593b328f737a1b"); } private void 
HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "90cbcc7e959eb591fb7c5e12d65e0e40"); + "b7a01525c00d02b3373513a668a43c6a"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "50894abb9d156bf480881cb5cb2a8a7d"); + "a2a42055b068334f415efb07d6bb9acd"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 15516d090..aca1172d4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -47,15 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import net.sf.picard.reference.IndexedFastaSequenceFile; -import org.broad.tribble.TribbleIndexedFeatureReader; import org.broadinstitute.sting.WalkerTest; -import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.variant.GATKVCFUtils; import org.broadinstitute.variant.variantcontext.VariantContext; -import org.broadinstitute.variant.vcf.VCFCodec; import org.testng.annotations.Test; import java.io.File; @@ -69,6 +66,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { final static String NA12878_CHR20_BAM = validationDataLocation + 
"NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam"; final static String CEUTRIO_BAM = validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam"; final static String NA12878_RECALIBRATED_BAM = privateTestDir + "NA12878.100kb.BQSRv2.example.bam"; + final static String NA12878_PCRFREE = privateTestDir + "PCRFree.2x250.Illumina.20_10_11.bam"; final static String CEUTRIO_MT_TEST_BAM = privateTestDir + "CEUTrio.HiSeq.b37.MT.1_50.bam"; final static String INTERVALS_FILE = validationDataLocation + "NA12878.HiSeq.b37.chr20.10_11mb.test.intervals"; @@ -80,12 +78,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "37e462379de17bc6c8aeeed6e9735dd3"); + HCTest(CEUTRIO_BAM, "", "baa5a2eedc8f06ce9f8f98411ee09f8a"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "983a0d122714d4aa0ff7af20cc686703"); + HCTest(NA12878_BAM, "", "f09e03d41238697b23f95716a12667cb"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -96,12 +94,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "dbbc884a975587d8e7255ce47b58f438"); + "130d36448faeb7b8d4bce4be12dacd3a"); } @Test public void testHaplotypeCallerInsertionOnEdgeOfContig() { - HCTest(CEUTRIO_MT_TEST_BAM, "-dcov 90 -L MT:1-10", "7f1fb8f9587f64643f6612ef1dd6d4ae"); + HCTest(CEUTRIO_MT_TEST_BAM, "-L MT:1-10", "7f1fb8f9587f64643f6612ef1dd6d4ae"); } private void HCTestIndelQualityScores(String bam, String args, String md5) { @@ -112,7 +110,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", 
"ce602282e80cca6d4272f940e20e90c3"); + HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "7c20aa62633f4ce8ebf12950fbf05ec0"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -149,7 +147,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "09335c01d2e90714af7f4c91156da0b1"); + HCTestNearbySmallIntervals(NA12878_BAM, "", "0ddc56f0a0fbcfefda79aa20b2ecf603"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -159,14 +157,14 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void HCTestProblematicReadsModifiedInActiveRegions() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "haplotype-problem-4.bam") + " --no_cmdline_in_header -o %s -minPruning 3 -L 4:49139026-49139965"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("b34ddc93a7b9919e05da499508f44dd9")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("0689d2c202849fd05617648eaf429b9a")); executeTest("HCTestProblematicReadsModifiedInActiveRegions: ", spec); } @Test public void HCTestStructuralIndels() { final String base = String.format("-T HaplotypeCaller --disableDithering -R %s -I %s", REF, privateTestDir + "AFR.structural.indels.bam") + " --no_cmdline_in_header -o %s -minPruning 6 -L 20:8187565-8187800 -L 20:18670537-18670730"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("98a78b9f58ab197b827ef2ce3ab043d3")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("91717e5e271742c2c9b67223e58f1320")); executeTest("HCTestStructuralIndels: ", spec); } @@ -188,7 +186,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T 
HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("6e6ef6e0326bee6d20d9fd37349fdb8c")); + Arrays.asList("5fe9310addf881bed4fde2354e59ce34")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -196,7 +194,30 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("5e535983b2f7e5fb6c84fecffa092324")); + Arrays.asList("26a9917f6707536636451266de0116c3")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } + + // -------------------------------------------------------------------------------------------------------------- + // + // test dbSNP annotation + // + // -------------------------------------------------------------------------------------------------------------- + + @Test + public void HCTestDBSNPAnnotationWGS() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-10,100,000 -D " + b37dbSNP132, 1, + Arrays.asList("cc6f2a76ee97ecc14a5f956ffbb21d88")); + executeTest("HC calling with dbSNP ID annotation on WGS intervals", spec); + } + + @Test + public void HCTestDBSNPAnnotationWEx() { + WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T HaplotypeCaller --disableDithering -R " + b37KGReference + " --no_cmdline_in_header -I " + NA12878_PCRFREE + " -o %s -L 20:10,000,000-11,000,000 -D " + b37dbSNP132 + + " -L " + hg19Intervals + " -isr INTERSECTION", 1, + 
Arrays.asList("51e91c8af61a6b47807165906baefb00")); + executeTest("HC calling with dbSNP ID annotation on WEx intervals", spec); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java index ff5a501cc..d009550f4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerParallelIntegrationTest.java @@ -61,7 +61,7 @@ public class HaplotypeCallerParallelIntegrationTest extends WalkerTest { List tests = new ArrayList(); for ( final int nct : Arrays.asList(1, 2, 4) ) { - tests.add(new Object[]{nct, "c277fd65365d59b734260dd8423313bb"}); + tests.add(new Object[]{nct, "9da4cc89590c4c64a36f4a9c820f8609"}); } return tests.toArray(new Object[][]{}); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java index 989c38628..116c987a6 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KmerUnitTest.java @@ -47,13 +47,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import java.util.*; public class KmerUnitTest extends BaseTest { @DataProvider(name = "KMerCreationData") @@ -130,4 +129,40 @@ public class KmerUnitTest extends BaseTest { } } } + + 
@Test + public void testDifferingPositions() { + final String bases = "ACGTCAGACGTACGTTTGACGTCAGACGTACGT"; + final Kmer baseKmer = new Kmer(bases.getBytes()); + + + final int NUM_TEST_CASES = 30; + + for (int test = 0; test < NUM_TEST_CASES; test++) { + + final int numBasesToChange = test % bases.length(); + + // changes numBasesToChange bases - spread regularly through read string + final int step = (numBasesToChange > 0?Math.min(bases.length() / numBasesToChange,1) : 1); + + final byte[] newBases = bases.getBytes().clone(); + int actualChangedBases =0; // could be different from numBasesToChange due to roundoff + for (int idx=0; idx < numBasesToChange; idx+=step) { + // now change given positions + newBases[idx] = (newBases[idx] == (byte)'A'? (byte)'T':(byte)'A'); + actualChangedBases++; + } + + // compute changed positions + final int[] differingIndices = new int[newBases.length]; + final byte[] differingBases = new byte[newBases.length]; + final int numDiffs = baseKmer.getDifferingPositions(new Kmer(newBases),newBases.length,differingIndices,differingBases); + Assert.assertEquals(numDiffs,actualChangedBases); + for (int k=0; k < numDiffs; k++) { + final int idx = differingIndices[k]; + Assert.assertTrue(newBases[idx] != bases.getBytes()[idx]); + Assert.assertEquals(differingBases[idx],newBases[idx]); + } + } + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java index a517e1cb1..2fda56665 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngineUnitTest.java @@ -47,6 +47,9 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.Cigar; 
+import net.sf.samtools.CigarElement; +import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading.ReadThreadingAssembler; @@ -216,11 +219,15 @@ public class LocalAssemblyEngineUnitTest extends BaseTest { private List assemble(final Assembler assembler, final byte[] refBases, final GenomeLoc loc, final List reads) { final Haplotype refHaplotype = new Haplotype(refBases, true); + final Cigar c = new Cigar(); + c.add(new CigarElement(refHaplotype.getBases().length, CigarOperator.M)); + refHaplotype.setCigar(c); + final ActiveRegion activeRegion = new ActiveRegion(loc, null, true, genomeLocParser, 0); activeRegion.addAll(reads); final LocalAssemblyEngine engine = createAssembler(assembler); // logger.warn("Assembling " + activeRegion + " with " + engine); - return engine.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.emptyList()); + return engine.runLocalAssembly(activeRegion, refHaplotype, refBases, loc, Collections.emptyList(), null); } @DataProvider(name = "SimpleAssemblyTestData") @@ -244,7 +251,7 @@ public class LocalAssemblyEngineUnitTest extends BaseTest { for ( int snpPos = 0; snpPos < windowSize; snpPos++) { if ( snpPos > excludeVariantsWithXbp && (windowSize - snpPos) >= excludeVariantsWithXbp ) { final byte[] altBases = ref.getBytes(); - altBases[snpPos] = 'N'; + altBases[snpPos] = altBases[snpPos] == 'A' ? 
(byte)'C' : (byte)'A'; final String alt = new String(altBases); tests.add(new Object[]{"SNP at " + snpPos, assembler, refLoc, ref, alt}); } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java new file mode 100644 index 000000000..e201b24fc --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/ReadErrorCorrectorUnitTest.java @@ -0,0 +1,190 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class ReadErrorCorrectorUnitTest { + private static final boolean debug = true; + final String refChunk = "GCATAAACATGGCTCACTGC"; + final String refChunkHard = "AGCCTTGAACTCCTGGGCTCAAGTGATCCTCCTGCCTCAGTTTCCCATGTAGCTGGGACCACAGGTGGGGGCTCCACCCCTGGCTGATTTTTTTTTTTTTTTTTTTTTGAGATAGGGT"; + + @Test + public void TestBasicCorrectionSet() { + + final byte[] trueBases = refChunk.getBytes(); + final int numCorrections = 50; + final ReadErrorCorrector.CorrectionSet correctionSet = new ReadErrorCorrector.CorrectionSet(trueBases.length); + + int offset = 2; + for (int k=0; k < numCorrections; k++) { + // introduce one correction at a random offset in array. To make testing easier, we will replicate corrrection + final byte base = trueBases[offset]; + correctionSet.add(offset, base); + // skip to some other offset + offset += 7; + if (offset >= trueBases.length) + offset -= trueBases.length; + } + + for (int k=0; k < trueBases.length; k++) { + final byte corr = correctionSet.getConsensusCorrection(k); + Assert.assertEquals(corr, trueBases[k]); + } + } + + @Test + public void TestExtendedCorrectionSet() { + + final byte[] trueBases = refChunk.getBytes(); + final int numCorrections = 50; + final ReadErrorCorrector.CorrectionSet correctionSet = new ReadErrorCorrector.CorrectionSet(trueBases.length); + + for (int offset=0; offset < trueBases.length; offset++) { + // insert k corrections at offset k and make sure we get exactly k bases back + for (int k=0; k < offset; k++) + correctionSet.add(offset,trueBases[offset]); + + } + + for (int offset=0; offset < trueBases.length; offset++) { + 
Assert.assertEquals(correctionSet.get(offset).size(),offset); + } + } + + @Test + public void TestAddReadsToKmers() { + final int NUM_GOOD_READS = 500; + + final String bases = "AAAAAAAAAAAAAAA"; + final int READ_LENGTH = bases.length(); + final int kmerLengthForReadErrorCorrection = READ_LENGTH; + final List finalizedReadList = new ArrayList(NUM_GOOD_READS); + int offset = 0; + final byte[] quals = new byte[READ_LENGTH]; + + Arrays.fill(quals,(byte)30); + + for (int k=0; k < NUM_GOOD_READS; k++) { + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases.getBytes(), quals,READ_LENGTH+"M"); + finalizedReadList.add(read); + } + + ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + + // special trivial case: kmer length is equal to read length. + // K-mer counter should hold then exactly one kmer + Assert.assertEquals(readErrorCorrector.countsByKMer.getCountedKmers().size(), 1); + for (final KMerCounter.CountedKmer kmer : readErrorCorrector.countsByKMer.getCountedKmers()) { + Assert.assertTrue(Arrays.equals( kmer.getKmer().bases(),bases.getBytes())); + Assert.assertEquals(kmer.getCount(),NUM_GOOD_READS); + } + + // special case 2: kmers are all the same but length < read length. 
+ // Each kmer is added then readLength-kmerLength+1 times + final int KMER_LENGTH = 10; + readErrorCorrector = new ReadErrorCorrector(KMER_LENGTH,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + Assert.assertEquals(readErrorCorrector.countsByKMer.getCountedKmers().size(), 1); + for (final KMerCounter.CountedKmer kmer : readErrorCorrector.countsByKMer.getCountedKmers()) { + Assert.assertEquals(kmer.getCount(),NUM_GOOD_READS*(READ_LENGTH-KMER_LENGTH+1)); + } + + } + @Test + public void TestBasicErrorCorrection() { + final int NUM_GOOD_READS = 500; + final int NUM_BAD_READS = 10; + final int READ_LENGTH = 15; + final int kmerLengthForReadErrorCorrection = 10; + final List finalizedReadList = new ArrayList(NUM_GOOD_READS); + int offset = 0; + final byte[] quals = new byte[READ_LENGTH]; + + Arrays.fill(quals,(byte)30); + + for (int k=0; k < NUM_GOOD_READS; k++) { + final byte[] bases = Arrays.copyOfRange(refChunk.getBytes(),offset,offset+READ_LENGTH); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals,READ_LENGTH+"M"); + finalizedReadList.add(read); + offset++; + if (offset >= refChunk.length()-READ_LENGTH) + offset = 0; + } + offset = 2; + // coverage profile is now perfectly triangular with "good" bases. Inject now bad bases with errors in them. 
+ for (int k=0; k < NUM_BAD_READS; k++) { + final byte[] bases = finalizedReadList.get(k).getReadBases().clone(); + bases[offset] = 'N'; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, READ_LENGTH + "M"); + finalizedReadList.add(read); + offset += 7; + if (offset >= READ_LENGTH) + offset = 4; // just some randomly circulating offset for error position + } + + // now correct all reads + final ReadErrorCorrector readErrorCorrector = new ReadErrorCorrector(kmerLengthForReadErrorCorrection,(byte)6,10, debug,refChunkHard.getBytes()); + readErrorCorrector.addReadsToKmers(finalizedReadList); + readErrorCorrector.correctReads(finalizedReadList); + + // check that corrected reads have exactly same content as original reads + for (int k=0; k < NUM_BAD_READS; k++) { + final byte[] badBases = finalizedReadList.get(k).getReadBases(); + final byte[] originalBases = finalizedReadList.get(k).getReadBases(); + Assert.assertTrue(Arrays.equals(badBases,originalBases)); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java index e57f5d6e0..f9cbc6c73 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java @@ -312,4 +312,19 @@ public class BaseGraphUnitTest extends BaseTest { Assert.assertTrue(BaseGraph.graphEquals(graph, expectedGraph)); } + + @Test(enabled = true) + public void testGetBases() { + + final int kmerSize = 4; + final String testString = "AATGGGGGCAATACTA"; + + final List vertexes = new ArrayList<>(); + for ( int i = 0; i <= testString.length() - kmerSize; i++ ) { + vertexes.add(new DeBruijnVertex(testString.substring(i, i + kmerSize))); + } + + final String result = new String(new 
DeBruijnGraph().getBasesForPath(vertexes)); + Assert.assertEquals(result, testString.substring(kmerSize - 1)); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java index cfed2f0b8..e1398e119 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixMergerUnitTest.java @@ -166,4 +166,20 @@ public class CommonSuffixMergerUnitTest extends BaseTest { splitter.merge(data.graph, data.v); assertSameHaplotypes(String.format("suffixMerge.%s.%d", data.commonSuffix, data.graph.vertexSet().size()), data.graph, original); } + + @Test + public void testDoesntMergeSourceNodes() { + final SeqGraph g = new SeqGraph(); + final SeqVertex v1 = new SeqVertex("A"); + final SeqVertex v2 = new SeqVertex("A"); + final SeqVertex v3 = new SeqVertex("A"); + final SeqVertex top = new SeqVertex("T"); + final SeqVertex b = new SeqVertex("C"); + g.addVertices(top, v1, v2, v3, top, b); + g.addEdges(top, v1, b); + g.addEdges(v2, b); // v2 doesn't have previous node, cannot be merged + g.addEdges(top, v3, b); + final SharedSequenceMerger merger = new SharedSequenceMerger(); + Assert.assertFalse(merger.merge(g, b), "Shouldn't be able to merge shared vertices, when one is a source"); + } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java index f11be6635..fc40edc42 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java +++ 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/MultiSampleEdgeUnitTest.java @@ -54,19 +54,29 @@ import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; +import java.util.*; public class MultiSampleEdgeUnitTest extends BaseTest { + + private class MultiplicityTestProvider { + final List countsPerSample; + final int numSamplesPruning; + public MultiplicityTestProvider(final List countsPerSample, final int numSamplesPruning) { + this.countsPerSample = countsPerSample; + this.numSamplesPruning = numSamplesPruning; + } + } + @DataProvider(name = "MultiplicityData") public Object[][] makeMultiplicityData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final List countsPerSample = Arrays.asList(0, 1, 2, 3, 4, 5); - for ( final int nSamples : Arrays.asList(1, 2, 3, 4, 5)) { - for ( final List perm : Utils.makePermutations(countsPerSample, nSamples, false) ) { - tests.add(new Object[]{perm}); + for ( final int numSamplesPruning : Arrays.asList(1, 2, 3) ) { + for ( final int nSamples : Arrays.asList(1, 2, 3, 4, 5)) { + for ( final List perm : Utils.makePermutations(countsPerSample, nSamples, false) ) { + tests.add(new Object[]{new MultiplicityTestProvider(perm, numSamplesPruning)}); + } } } @@ -77,15 +87,15 @@ public class MultiSampleEdgeUnitTest extends BaseTest { * Example testng test using MyDataProvider */ @Test(dataProvider = "MultiplicityData") - public void testMultiplicity(final List countsPerSample) { - final MultiSampleEdge edge = new MultiSampleEdge(false, 0); + public void testMultiplicity(final MultiplicityTestProvider cfg) { + final MultiSampleEdge edge = new MultiSampleEdge(false, 0, cfg.numSamplesPruning); Assert.assertEquals(edge.getMultiplicity(), 0); Assert.assertEquals(edge.getPruningMultiplicity(), 0); int total = 0; - for ( int i = 0; i < countsPerSample.size(); i++ ) { 
+ for ( int i = 0; i < cfg.countsPerSample.size(); i++ ) { int countForSample = 0; - for ( int count = 0; count < countsPerSample.get(i); count++ ) { + for ( int count = 0; count < cfg.countsPerSample.get(i); count++ ) { edge.incMultiplicity(1); total++; countForSample++; @@ -95,9 +105,11 @@ public class MultiSampleEdgeUnitTest extends BaseTest { edge.flushSingleSampleMultiplicity(); } - final int max = MathUtils.arrayMax(ArrayUtils.toPrimitive(countsPerSample.toArray(new Integer[countsPerSample.size()]))); + ArrayList counts = new ArrayList<>(cfg.countsPerSample); + counts.add(0); + Collections.sort(counts); + final int prune = counts.get(Math.max(counts.size() - cfg.numSamplesPruning, 0)); Assert.assertEquals(edge.getMultiplicity(), total); - Assert.assertEquals(edge.getPruningMultiplicity(), max); - Assert.assertEquals(edge.getMaxSingleSampleMultiplicity(), max); + Assert.assertEquals(edge.getPruningMultiplicity(), prune); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java index 8efb3d486..8269b9c20 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingAssemblerUnitTest.java @@ -83,9 +83,10 @@ public class ReadThreadingAssemblerUnitTest extends BaseTest { } public SeqGraph assemble() { - assembler.removePathsNotConnectedToRef = false; // need to pass some of the tests + assembler.removePathsNotConnectedToRef = false; // needed to pass some of the tests + assembler.setRecoverDanglingTails(false); // needed to pass some of the tests assembler.setDebugGraphTransformations(true); - final SeqGraph graph = assembler.assemble(reads, refHaplotype).get(0); + final SeqGraph 
graph = assembler.assemble(reads, refHaplotype, Collections.emptyList()).get(0); if ( DEBUG ) graph.printGraph(new File("test.dot"), 0); return graph; } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java index 10c1cc00d..ed91cccb3 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/readthreading/ReadThreadingGraphUnitTest.java @@ -48,8 +48,12 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.readthreading; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.Kmer; -import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.MultiSampleEdge; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.File; @@ -145,7 +149,136 @@ public class ReadThreadingGraphUnitTest extends BaseTest { } } - // TODO -- update to use determineKmerSizeAndNonUniques directly + @Test(enabled = !DEBUG) + public void testCyclesInGraph() { + + // b37 20:12655200-12655850 + final String ref = 
"CAATTGTCATAGAGAGTGACAAATGTTTCAAAAGCTTATTGACCCCAAGGTGCAGCGGTGCACATTAGAGGGCACCTAAGACAGCCTACAGGGGTCAGAAAAGATGTCTCAGAGGGACTCACACCTGAGCTGAGTTGTGAAGGAAGAGCAGGATAGAATGAGCCAAAGATAAAGACTCCAGGCAAAAGCAAATGAGCCTGAGGGAAACTGGAGCCAAGGCAAGAGCAGCAGAAAAGAGCAAAGCCAGCCGGTGGTCAAGGTGGGCTACTGTGTATGCAGAATGAGGAAGCTGGCCAAGTAGACATGTTTCAGATGATGAACATCCTGTATACTAGATGCATTGGAACTTTTTTCATCCCCTCAACTCCACCAAGCCTCTGTCCACTCTTGGTACCTCTCTCCAAGTAGACATATTTCAGATCATGAACATCCTGTGTACTAGATGCATTGGAAATTTTTTCATCCCCTCAACTCCACCCAGCCTCTGTCCACACTTGGTACCTCTCTCTATTCATATCTCTGGCCTCAAGGAGGGTATTTGGCATTAGTAAATAAATTCCAGAGATACTAAAGTCAGATTTTCTAAGACTGGGTGAATGACTCCATGGAAGAAGTGAAAAAGAGGAAGTTGTAATAGGGAGACCTCTTCGG"; + + // SNP at 20:12655528 creates a cycle for small kmers + final String alt = "CAATTGTCATAGAGAGTGACAAATGTTTCAAAAGCTTATTGACCCCAAGGTGCAGCGGTGCACATTAGAGGGCACCTAAGACAGCCTACAGGGGTCAGAAAAGATGTCTCAGAGGGACTCACACCTGAGCTGAGTTGTGAAGGAAGAGCAGGATAGAATGAGCCAAAGATAAAGACTCCAGGCAAAAGCAAATGAGCCTGAGGGAAACTGGAGCCAAGGCAAGAGCAGCAGAAAAGAGCAAAGCCAGCCGGTGGTCAAGGTGGGCTACTGTGTATGCAGAATGAGGAAGCTGGCCAAGTAGACATGTTTCAGATGATGAACATCCTGTGTACTAGATGCATTGGAACTTTTTTCATCCCCTCAACTCCACCAAGCCTCTGTCCACTCTTGGTACCTCTCTCCAAGTAGACATATTTCAGATCATGAACATCCTGTGTACTAGATGCATTGGAAATTTTTTCATCCCCTCAACTCCACCCAGCCTCTGTCCACACTTGGTACCTCTCTCTATTCATATCTCTGGCCTCAAGGAGGGTATTTGGCATTAGTAAATAAATTCCAGAGATACTAAAGTCAGATTTTCTAAGACTGGGTGAATGACTCCATGGAAGAAGTGAAAAAGAGGAAGTTGTAATAGGGAGACCTCTTCGG"; + + final List reads = new ArrayList<>(); + for ( int index = 0; index < alt.length() - 100; index += 20 ) + reads.add(ArtificialSAMUtils.createArtificialRead(Arrays.copyOfRange(alt.getBytes(), index, index + 100), Utils.dupBytes((byte) 30, 100), 100 + "M")); + + // test that there are cycles detected for small kmer + final ReadThreadingGraph rtgraph25 = new ReadThreadingGraph(25); + rtgraph25.addSequence("ref", ref.getBytes(), null, true); + for ( final GATKSAMRecord read : reads ) + rtgraph25.addRead(read); + rtgraph25.buildGraphIfNecessary(); + Assert.assertTrue(rtgraph25.hasCycles()); + + // test 
that there are no cycles detected for large kmer + final ReadThreadingGraph rtgraph75 = new ReadThreadingGraph(75); + rtgraph75.addSequence("ref", ref.getBytes(), null, true); + for ( final GATKSAMRecord read : reads ) + rtgraph75.addRead(read); + rtgraph75.buildGraphIfNecessary(); + Assert.assertFalse(rtgraph75.hasCycles()); + } + + @Test(enabled = !DEBUG) + public void testNsInReadsAreNotUsedForGraph() { + + final int length = 100; + final byte[] ref = Utils.dupBytes((byte)'A', length); + + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(25); + rtgraph.addSequence("ref", ref, null, true); + + // add reads with Ns at any position + for ( int i = 0; i < length; i++ ) { + final byte[] bases = ref.clone(); + bases[i] = 'N'; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, Utils.dupBytes((byte) 30, length), length + "M"); + rtgraph.addRead(read); + } + rtgraph.buildGraphIfNecessary(); + + final SeqGraph graph = rtgraph.convertToSequenceGraph(); + final KBestPaths pathFinder = new KBestPaths<>(false); + Assert.assertEquals(pathFinder.getKBestPaths(graph, length, graph.getReferenceSourceVertex(), graph.getReferenceSinkVertex()).size(), 1); + } + + @DataProvider(name = "DanglingTails") + public Object[][] makeDanglingTailsData() { + List tests = new ArrayList(); + + // add 1M to the expected CIGAR because it includes the previous (common) base too + tests.add(new Object[]{"AAAAAAAAAA", "CAAA", "5M", true, 3}); // incomplete haplotype + tests.add(new Object[]{"AAAAAAAAAA", "CAAAAAAAAAA", "1M1I10M", true, 10}); // insertion + tests.add(new Object[]{"CCAAAAAAAAAA", "AAAAAAAAAA", "1M2D10M", true, 10}); // deletion + tests.add(new Object[]{"AAAAAAAA", "CAAAAAAA", "9M", true, 7}); // 1 snp + tests.add(new Object[]{"AAAAAAAA", "CAAGATAA", "9M", true, 2}); // several snps + tests.add(new Object[]{"AAAAA", "C", "1M4D1M", true, -1}); // funky SW alignment + tests.add(new Object[]{"AAAAA", "CA", "1M3D2M", true, 1}); // very little data + 
tests.add(new Object[]{"AAAAAAA", "CAAAAAC", "8M", true, -1}); // ends in mismatch + tests.add(new Object[]{"AAAAAA", "CGAAAACGAA", "1M2I4M2I2M", false, 0}); // alignment is too complex + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "DanglingTails", enabled = !DEBUG) + public void testDanglingTails(final String refEnd, + final String altEnd, + final String cigar, + final boolean cigarIsGood, + final int mergePointDistanceFromSink) { + + final int kmerSize = 15; + + // construct the haplotypes + final String commonPrefix = "AAAAAAAAAACCCCCCCCCCGGGGGGGGGGTTTTTTTTTT"; + final String ref = commonPrefix + refEnd; + final String alt = commonPrefix + altEnd; + + // create the graph and populate it + final ReadThreadingGraph rtgraph = new ReadThreadingGraph(kmerSize); + rtgraph.addSequence("ref", ref.getBytes(), null, true); + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(alt.getBytes(), Utils.dupBytes((byte) 30, alt.length()), alt.length() + "M"); + rtgraph.addRead(read); + rtgraph.buildGraphIfNecessary(); + + // confirm that we have just a single dangling tail + MultiDeBruijnVertex altSink = null; + for ( final MultiDeBruijnVertex v : rtgraph.vertexSet() ) { + if ( rtgraph.isSink(v) && !rtgraph.isReferenceNode(v) ) { + Assert.assertTrue(altSink == null, "We found more than one non-reference sink"); + altSink = v; + } + } + + Assert.assertTrue(altSink != null, "We did not find a non-reference sink"); + + // confirm that the SW alignment agrees with our expectations + final ReadThreadingGraph.DanglingTailMergeResult result = rtgraph.generateCigarAgainstReferencePath(altSink); + Assert.assertTrue(cigar.equals(result.cigar.toString()), "SW generated cigar = " + result.cigar.toString()); + + // confirm that the goodness of the cigar agrees with our expectations + Assert.assertEquals(rtgraph.cigarIsOkayToMerge(result.cigar), cigarIsGood); + + // confirm that the tail merging works as expected + if ( cigarIsGood ) { + final int 
mergeResult = rtgraph.mergeDanglingTail(result); + Assert.assertTrue(mergeResult == 1 || mergePointDistanceFromSink == -1); + + // confirm that we created the appropriate edge + if ( mergePointDistanceFromSink >= 0 ) { + MultiDeBruijnVertex v = altSink; + for ( int i = 0; i < mergePointDistanceFromSink; i++ ) { + if ( rtgraph.inDegreeOf(v) != 1 ) + Assert.fail("Encountered vertex with multiple sources"); + v = rtgraph.getEdgeSource(rtgraph.incomingEdgeOf(v)); + } + Assert.assertTrue(rtgraph.outDegreeOf(v) > 1); + } + } + } + + +// TODO -- update to use determineKmerSizeAndNonUniques directly // @DataProvider(name = "KmerSizeData") // public Object[][] makeKmerSizeDataProvider() { // List tests = new ArrayList(); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java index 9bcd7a3a3..0f910507e 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/indels/ConstrainedMateFixingManagerUnitTest.java @@ -66,9 +66,10 @@ public class ConstrainedMateFixingManagerUnitTest extends BaseTest { @BeforeClass public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 100); + header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 10000); genomeLocParser = new GenomeLocParser(header.getSequenceDictionary()); } + @Test public void testSecondaryAlignmentsDoNotInterfere() { final List properReads = ArtificialSAMUtils.createPair(header, "foo", 1, 10, 30, true, false); @@ -105,4 +106,29 @@ public class ConstrainedMateFixingManagerUnitTest extends BaseTest { } } + @Test + public void testSecondaryAlignmentsDoNotCauseAccidentalRemovalOfMate() { + final List properReads = ArtificialSAMUtils.createPair(header, "foo", 1, 530, 1594, true, false); + final 
GATKSAMRecord read1 = properReads.get(0); + read1.setFlags(99); // first in proper pair, mate negative strand + + final GATKSAMRecord read2Primary = properReads.get(1); + read2Primary.setFlags(147); // second in pair, mate unmapped, not primary alignment + read2Primary.setAlignmentStart(1596); // move the read + + final GATKSAMRecord read2NonPrimary = new GATKSAMRecord(read2Primary); + read2NonPrimary.setReadName("foo"); + read2NonPrimary.setFlags(393); // second in proper pair, on reverse strand + read2NonPrimary.setAlignmentStart(451); + read2NonPrimary.setMateAlignmentStart(451); + + final ConstrainedMateFixingManager manager = new ConstrainedMateFixingManager(null, genomeLocParser, 10000, 200, 10000); + manager.addRead(read2NonPrimary, false, false); + manager.addRead(read1, false, false); + + for ( int i = 0; i < ConstrainedMateFixingManager.EMIT_FREQUENCY; i++ ) + manager.addRead(ArtificialSAMUtils.createArtificialRead(header, "foo" + i, 0, 1500, 10), false, false); + + Assert.assertTrue(manager.forMateMatching.containsKey("foo")); + } } \ No newline at end of file diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java index 6c4072962..917cbd542 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/variantutils/CombineVariantsIntegrationTest.java @@ -189,4 +189,15 @@ public class CombineVariantsIntegrationTest extends WalkerTest { Arrays.asList("aa926eae333208dc1f41fe69dc95d7a6")); cvExecuteTest("combineDBSNPDuplicateSites:", spec, true); } + + @Test + public void combineLeavesUnfilteredRecordsUnfiltered() { + WalkerTestSpec spec = new WalkerTestSpec( + "-T CombineVariants --no_cmdline_in_header -o %s " + + " -R " + b37KGReference + + " -V " + privateTestDir + 
"combineVariantsLeavesRecordsUnfiltered.vcf", + 1, + Arrays.asList("f8c014d0af7e014475a2a448dc1f9cef")); + cvExecuteTest("combineLeavesUnfilteredRecordsUnfiltered: ", spec, false); + } } \ No newline at end of file diff --git a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java index 9530ea41f..651beffc8 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java @@ -233,7 +233,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0)}); tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -3.0)}); tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -2.0)}); - tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.0)}); + tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.2)}); tests.add(new Object[]{100, 0.001, true, Arrays.asList(-5.0, -10.0)}); tests.add(new Object[]{100, 0.001, false, Arrays.asList(-5.0, -10.0, 0.0)}); @@ -243,7 +243,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { @Test(dataProvider = "PoorlyModelledReadData") public void testPoorlyModelledRead(final int readLen, final double maxErrorRatePerBase, final boolean expected, final List log10likelihoods) { final byte[] bases = Utils.dupBytes((byte)'A', readLen); - final byte[] quals = Utils.dupBytes((byte) 30, readLen); + final byte[] quals = Utils.dupBytes((byte) 40, readLen); final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, readLen + "M"); @@ -279,7 +279,7 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { final double likelihood = bad ? 
-100.0 : 0.0; final byte[] bases = Utils.dupBytes((byte)'A', readLen); - final byte[] quals = Utils.dupBytes((byte) 30, readLen); + final byte[] quals = Utils.dupBytes((byte) 40, readLen); final Allele allele = Allele.create(Utils.dupString("A", readI+1)); diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java index 91a2988aa..0c76ad338 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java @@ -177,10 +177,10 @@ public class HaplotypeBAMWriterUnitTest extends BaseTest { final GATKSAMRecord originalReadCopy = (GATKSAMRecord)read.clone(); if ( expectedReadCigar == null ) { - Assert.assertNull(writer.createReadAlignedToRef(read, haplotype, refStart)); + Assert.assertNull(writer.createReadAlignedToRef(read, haplotype, refStart, true)); } else { final Cigar expectedCigar = TextCigarCodec.getSingleton().decode(expectedReadCigar); - final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, refStart); + final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, refStart, true); Assert.assertEquals(alignedRead.getReadName(), originalReadCopy.getReadName()); Assert.assertEquals(alignedRead.getAlignmentStart(), expectedReadStart); @@ -290,7 +290,7 @@ public class HaplotypeBAMWriterUnitTest extends BaseTest { @Test(dataProvider = "ComplexReadAlignedToRef", enabled = !DEBUG) public void testReadAlignedToRefComplexAlignment(final int testIndex, final GATKSAMRecord read, final String reference, final Haplotype haplotype, final int expectedMaxMismatches) throws Exception { final HaplotypeBAMWriter writer = new CalledHaplotypeBAMWriter(new MockBAMWriter()); - final GATKSAMRecord alignedRead = 
writer.createReadAlignedToRef(read, haplotype, 1); + final GATKSAMRecord alignedRead = writer.createReadAlignedToRef(read, haplotype, 1, true); if ( alignedRead != null ) { final int mismatches = AlignmentUtils.getMismatchCount(alignedRead, reference.getBytes(), alignedRead.getAlignmentStart() - 1).numMismatches; Assert.assertTrue(mismatches <= expectedMaxMismatches, diff --git a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R index 8a9eecf48..b0055dd10 100644 --- a/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R +++ b/public/R/scripts/org/broadinstitute/sting/utils/recalibration/BQSR.R @@ -12,7 +12,27 @@ if ( interactive() ) { args <- commandArgs(TRUE) } data <- read.csv(args[1]) + +data$Recalibration = as.factor(sapply(as.character(data$Recalibration),function(x) { + xu = toupper(x); + if (xu == "ORIGINAL") "BEFORE" else + if (xu == "RECALIBRATED") "AFTER" else + if (xu == "RECALIBRATION") "BQSR" else + xu })); + gsa.report <- gsa.read.gatkreport(args[2]) + +gsa.report$Arguments$Value = as.character(gsa.report$Arguments$Value); +gsa.report$Arguments = subset(gsa.report$Arguments,subset= Argument != "plot_pdf_file"); +if (length(levels(data$Recalibration)) > 1) { + gsa.report$Arguments = subset(gsa.report$Arguments,subset= Argument != "recalibration_report"); +} +gsa.report$Arguments$Value[gsa.report$Argument$Value == "null"] = "None"; + +gsa.report.covariate.argnum = gsa.report$Arguments$Argument == "covariate"; +gsa.report$Arguments$Value[gsa.report.covariate.argnum] = sapply(strsplit(gsa.report$Arguments$Value[gsa.report.covariate.argnum],","),function(x) { + y = sub("(^.+)Covariate","\\1",x); paste(y,collapse=",") } ); + data <- within(data, EventType <- factor(EventType, levels = rev(levels(EventType)))) numRG = length(unique(data$ReadGroup)) @@ -54,31 +74,31 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn d=rbind(dSub, 
dIns, dDel) if( cov != "QualityScore" ) { - p <- ggplot(d, aes(x=CovariateValue,y=Accuracy,alpha=log10(Observations))) + + p <- ggplot(d, aes(x=CovariateValue,y=Accuracy,alpha=log10(Observations))) + ylim(min(-10,d$Accuracy),max(10,d$Accuracy)) + geom_abline(intercept=0, slope=0, linetype=2) + xlab(paste(cov,"Covariate")) + ylab("Quality Score Accuracy") + blankTheme if(cov == "Cycle") { - b <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + b <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate")) + - ylab("Mean Quality Score") + + ylab("Mean Quality Score") + ylim(0,max(42,d$AverageReportedQuality)) + blankTheme - e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + e <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) } else { - c <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + c <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) + xlab(paste(cov,"Covariate (3 base suffix)")) p <- ggplot(d, aes(x=CovariateValue,y=AverageReportedQuality,alpha=log10(Observations))) + xlab(paste(cov,"Covariate (3 base suffix)")) + ylab("Mean Quality Score") + blankTheme - f <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + + f <- p + 
geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) + opts(axis.text.x=theme_text(angle=90, hjust=0)) } @@ -88,14 +108,14 @@ for(cov in levels(data$CovariateName)) { # for each covariate in turn xlab("Reported Quality Score") + ylab("Empirical Quality Score") + blankTheme - a <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("maroon1","blue")) + facet_grid(.~EventType) + a <- p + geom_point(aes(color=Recalibration)) + scale_color_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) + facet_grid(.~EventType) p <- ggplot(d, aes(x=CovariateValue)) + xlab(paste(cov,"Covariate")) + ylab("No. of Observations (area normalized)") + blankTheme d <- p + geom_histogram(aes(fill=Recalibration,weight=Observations,y=..ndensity..),alpha=0.6,binwidth=1,position="identity") - d <- d + scale_fill_manual(values=c("maroon1","blue")) + d <- d + scale_fill_manual(values=c("BEFORE"="maroon1","AFTER"="blue","BQSR"="black")) d <- d + facet_grid(.~EventType) # d <- d + scale_y_continuous(formatter="comma") } diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION index 6116e8c66..ecf76a95b 100644 --- a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION +++ b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/DESCRIPTION @@ -3,8 +3,11 @@ Type: Package Title: Utility functions Version: 1.0 Date: 2010-10-02 +Imports: gplots, ggplot2, png Author: Kiran Garimella -Maintainer: Kiran Garimella +Maintainer: Mauricio Carneiro +BugReports: http://gatkforums.broadinstitute.org Description: Utility functions for GATK NGS analyses License: BSD LazyLoad: yes +NeedsCompilation: no diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE new file mode 100644 index 000000000..0bfe475b4 --- 
/dev/null +++ b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/NAMESPACE @@ -0,0 +1 @@ +exportPattern("^[^\\.]") \ No newline at end of file diff --git a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg b/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg deleted file mode 100755 index c9d480fa0..000000000 Binary files a/public/R/src/org/broadinstitute/sting/utils/R/gsalib/data/tearsheetdrop.jpg and /dev/null differ diff --git a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java index 314de29c7..c4f1a286d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java @@ -170,6 +170,14 @@ public class GenomeAnalysisEngine { this.walker = walker; } + /** + * The short name of the current GATK walker as a string + * @return a non-null String + */ + public String getWalkerName() { + return getWalkerName(walker.getClass()); + } + /** * A processed collection of SAM reader identifiers. */ @@ -293,9 +301,11 @@ public class GenomeAnalysisEngine { // create the output streams initializeOutputStreams(microScheduler.getOutputTracker()); - logger.info("Creating shard strategy for " + readsDataSource.getReaderIDs().size() + " BAM files"); + // Initializing the shard iterator / BAM schedule might take some time, so let the user know vaguely what's going on + logger.info("Preparing for traversal" + + (readsDataSource.getReaderIDs().size() > 0 ? 
String.format(" over %d BAM files", readsDataSource.getReaderIDs().size()) : "")); Iterable shardStrategy = getShardStrategy(readsDataSource,microScheduler.getReference(),intervals); - logger.info("Done creating shard strategy"); + logger.info("Done preparing for traversal"); // execute the microscheduler, storing the results return microScheduler.execute(this.walker, shardStrategy); @@ -342,11 +352,18 @@ public class GenomeAnalysisEngine { * @return A collection of available filters. */ public Collection createFilters() { - final List filters = WalkerManager.getReadFilters(walker,this.getFilterManager()); + final List filters = new LinkedList<>(); + + // First add the user requested filters if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0) filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList)); for(final String filterName: this.getArguments().readFilters) filters.add(this.getFilterManager().createByName(filterName)); + + // now add the walker default filters. This ordering is critical important if + // users need to apply filters that fix up reads that would be removed by default walker filters + filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager())); + return Collections.unmodifiableList(filters); } @@ -463,9 +480,8 @@ public class GenomeAnalysisEngine { DownsamplingMethod commandLineMethod = argCollection.getDownsamplingMethod(); DownsamplingMethod walkerMethod = WalkerManager.getDownsamplingMethod(walker); - DownsamplingMethod defaultMethod = DownsamplingMethod.getDefaultDownsamplingMethod(walker); - DownsamplingMethod method = commandLineMethod != null ? commandLineMethod : (walkerMethod != null ? walkerMethod : defaultMethod); + DownsamplingMethod method = commandLineMethod != null ? 
commandLineMethod : walkerMethod; method.checkCompatibilityWithWalker(walker); return method; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java index aadb57985..29372abcd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java +++ b/public/java/src/org/broadinstitute/sting/gatk/ReadMetrics.java @@ -40,37 +40,27 @@ public class ReadMetrics implements Cloneable { private long nRecords; // How many reads have we processed, along with those skipped for various reasons private long nReads; - private long nSkippedReads; - private long nUnmappedReads; - private long nNotPrimary; - private long nBadAlignments; - private long nSkippedIndels; - private long nDuplicates; - private Map counter = new HashMap(); + + // keep track of filtered records by filter type (class) + private Map filterCounter = new HashMap<>(); /** * Combines these metrics with a set of other metrics, storing the results in this class. * @param metrics The metrics to fold into this class. */ - public void incrementMetrics(ReadMetrics metrics) { + public synchronized void incrementMetrics(ReadMetrics metrics) { nRecords += metrics.nRecords; nReads += metrics.nReads; - nSkippedReads += metrics.nSkippedReads; - nUnmappedReads += metrics.nUnmappedReads; - nNotPrimary += metrics.nNotPrimary; - nBadAlignments += metrics.nBadAlignments; - nSkippedIndels += metrics.nSkippedIndels; - nDuplicates += metrics.nDuplicates; - for(Map.Entry counterEntry: metrics.counter.entrySet()) { - Class counterType = counterEntry.getKey(); - long newValue = (counter.containsKey(counterType) ? counter.get(counterType) : 0) + counterEntry.getValue(); - counter.put(counterType,newValue); + for(Map.Entry counterEntry: metrics.filterCounter.entrySet()) { + final String counterType = counterEntry.getKey(); + final long newValue = (filterCounter.containsKey(counterType) ? 
filterCounter.get(counterType) : 0) + counterEntry.getValue(); + filterCounter.put(counterType, newValue); } } /** * Create a copy of the given read metrics. - * @return + * @return a non-null clone */ public ReadMetrics clone() { ReadMetrics newMetrics; @@ -82,33 +72,18 @@ public class ReadMetrics implements Cloneable { } newMetrics.nRecords = nRecords; newMetrics.nReads = nReads; - newMetrics.nSkippedReads = nSkippedReads; - newMetrics.nUnmappedReads = nUnmappedReads; - newMetrics.nNotPrimary = nNotPrimary; - newMetrics.nBadAlignments = nBadAlignments; - newMetrics.nSkippedIndels = nSkippedIndels; - newMetrics.nDuplicates = nDuplicates; - newMetrics.counter = new HashMap(counter); + newMetrics.filterCounter = new HashMap<>(filterCounter); return newMetrics; } - public void incrementFilter(SamRecordFilter filter) { - long c = 0; - if ( counter.containsKey(filter.getClass()) ) { - c = counter.get(filter.getClass()); - } - - counter.put(filter.getClass(), c + 1L); + public void setFilterCount(final String filter, final long count) { + filterCounter.put(filter, count); } public Map getCountsByFilter() { - final TreeMap sortedCounts = new TreeMap(); - for(Map.Entry counterEntry: counter.entrySet()) { - sortedCounts.put(counterEntry.getKey().getSimpleName(),counterEntry.getValue()); - } - return sortedCounts; + return new TreeMap<>(filterCounter); } /** @@ -143,95 +118,4 @@ public class ReadMetrics implements Cloneable { public void incrementNumReadsSeen() { nReads++; } - - /** - * Gets the cumulative number of reads skipped in the course of this run. - * @return Cumulative number of reads skipped in the course of this run. - */ - public long getNumSkippedReads() { - return nSkippedReads; - } - - /** - * Increments the cumulative number of reads skipped in the course of this run. - */ - public void incrementNumSkippedReads() { - nSkippedReads++; - } - - /** - * Gets the number of unmapped reads skipped in the course of this run. 
- * @return The number of unmapped reads skipped. - */ - public long getNumUnmappedReads() { - return nUnmappedReads; - } - - /** - * Increments the number of unmapped reads skipped in the course of this run. - */ - public void incrementNumUnmappedReads() { - nUnmappedReads++; - } - - /** - * - * @return - */ - public long getNumNonPrimaryReads() { - return nNotPrimary; - } - - /** - * - */ - public void incrementNumNonPrimaryReads() { - nNotPrimary++; - } - - /** - * - * @return - */ - public long getNumBadAlignments() { - return nBadAlignments; - } - - /** - * - */ - public void incrementNumBadAlignments() { - nBadAlignments++; - } - - /** - * - * @return - */ - public long getNumSkippedIndels() { - return nSkippedIndels; - } - - /** - * - */ - public void incrementNumSkippedIndels() { - nSkippedIndels++; - } - - /** - * - * @return - */ - public long getNumDuplicates() { - return nDuplicates; - } - - /** - * - */ - public void incrementNumDuplicates() { - nDuplicates++; - } - } diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java index 8d1fa4638..b5113fdea 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/GATKArgumentCollection.java @@ -69,8 +69,8 @@ public class GATKArgumentCollection { // // -------------------------------------------------------------------------------------------------------------- - @Argument(fullName = "phone_home", shortName = "et", doc="What kind of GATK run report should we generate? STANDARD is the default, can be NO_ET so nothing is posted to the run repository. 
Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) - public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.STANDARD; + @Argument(fullName = "phone_home", shortName = "et", doc="What kind of GATK run report should we generate? AWS is the default, can be NO_ET so nothing is posted to the run repository. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) + public GATKRunReport.PhoneHomeOption phoneHomeType = GATKRunReport.PhoneHomeOption.AWS; @Argument(fullName = "gatk_key", shortName = "K", doc="GATK Key file. Required if running with -et NO_ET. Please see " + UserException.PHONE_HOME_DOCS_URL + " for details.", required = false) public File gatkKeyFile = null; @@ -125,7 +125,14 @@ public class GATKArgumentCollection { @Argument(fullName = "downsample_to_fraction", shortName = "dfrac", doc = "Fraction [0.0-1.0] of reads to downsample to", required = false) public Double downsampleFraction = null; - @Argument(fullName = "downsample_to_coverage", shortName = "dcov", doc = "Coverage [integer] to downsample to at any given locus; note that downsampled reads are randomly selected from all possible reads at a locus. For non-locus-based traversals (eg., ReadWalkers), this sets the maximum number of reads at each alignment start position.", required = false) + @Argument(fullName = "downsample_to_coverage", shortName = "dcov", + doc = "Coverage [integer] to downsample to. For locus-based traversals (eg., LocusWalkers and ActiveRegionWalkers)," + + "this controls the maximum depth of coverage at each locus. For non-locus-based traversals (eg., ReadWalkers), " + + "this controls the maximum number of reads sharing the same alignment start position. 
Note that the " + + "coverage target is an approximate goal that is not guaranteed to be met exactly: the GATK's approach " + + "to downsampling is based on even representation of reads from all alignment start positions, and the " + + "downsampling algorithm will under some circumstances retain slightly more coverage than requested.", + required = false) public Integer downsampleCoverage = null; /** @@ -180,6 +187,12 @@ public class GATKArgumentCollection { @Argument(fullName = "allow_potentially_misencoded_quality_scores", shortName="allowPotentiallyMisencodedQuals", doc="Do not fail when encountering base qualities that are too high and that seemingly indicate a problem with the base quality encoding of the BAM file", required = false) public boolean ALLOW_POTENTIALLY_MISENCODED_QUALS = false; + @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "If set, use the original base quality scores from the OQ tag when present instead of the standard scores", required=false) + public Boolean useOriginalBaseQualities = false; + + @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "If reads are missing some or all base quality scores, this value will be used for all base quality scores", required=false) + public byte defaultBaseQualities = -1; + // -------------------------------------------------------------------------------------------------------------- // // performance log arguments @@ -194,9 +207,6 @@ public class GATKArgumentCollection { @Argument(fullName = "performanceLog", shortName="PF", doc="If provided, a GATK runtime performance log will be written to this file", required = false) public File performanceLog = null; - @Argument(fullName="useOriginalQualities", shortName = "OQ", doc = "If set, use the original base quality scores from the OQ tag when present instead of the standard scores", required=false) - public Boolean useOriginalBaseQualities = false; - // 
-------------------------------------------------------------------------------------------------------------- // // BQSR arguments @@ -260,9 +270,6 @@ public class GATKArgumentCollection { // // -------------------------------------------------------------------------------------------------------------- - @Argument(fullName="defaultBaseQualities", shortName = "DBQ", doc = "If reads are missing some or all base quality scores, this value will be used for all base quality scores", required=false) - public byte defaultBaseQualities = -1; - @Argument(fullName = "validation_strictness", shortName = "S", doc = "How strict should we be with validation", required = false) public SAMFileReader.ValidationStringency strictnessLevel = SAMFileReader.ValidationStringency.SILENT; diff --git a/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java b/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java index f8f56f89e..75a68d978 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java +++ b/public/java/src/org/broadinstitute/sting/gatk/arguments/ValidationExclusion.java @@ -36,6 +36,8 @@ public class ValidationExclusion { // our validation options public enum TYPE { + ALLOW_N_CIGAR_READS, // ignore the presence of N operators in CIGARs: do not blow up and process reads that contain one or more N operators. + // This exclusion does not have effect on reads that get filtered {@see MalformedReadFilter}. 
ALLOW_UNINDEXED_BAM, // allow bam files that do not have an index; we'll traverse them using monolithic shard ALLOW_UNSET_BAM_SORT_ORDER, // assume that the bam is sorted, even if the SO (sort-order) flag is not set NO_READ_ORDER_VERIFICATION, // do not validate that the reads are in order as we take them from the bam file diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java index fe3a0c6ce..3aff745fa 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalOverlappingRODsFromStream.java @@ -72,8 +72,6 @@ class IntervalOverlappingRODsFromStream { /** * Get the list of RODs overlapping loc from this stream of RODs. * - * Sequential calls to this function must obey the rule that loc2.getStart >= loc1.getStart - * * @param loc the interval to query * @return a non-null RODRecordList containing the overlapping RODs, which may be empty */ @@ -84,7 +82,6 @@ class IntervalOverlappingRODsFromStream { if ( lastQuery != null && loc.getStart() < lastQuery.getStart() ) throw new IllegalArgumentException(String.format("BUG: query interval (%s) starts before the previous interval %s", loc, lastQuery)); - trimCurrentFeaturesToLoc(loc); readOverlappingFutureFeatures(loc); return new RODRecordListImpl(name, subsetToOverlapping(loc, currentFeatures), loc); } @@ -128,11 +125,14 @@ class IntervalOverlappingRODsFromStream { /** * Update function. Remove all elements of currentFeatures that end before loc * + * Must be called by clients periodically when they know they will never ask for data before + * loc, so that the running cache of RODs doesn't grow out of control. 
+ * * @param loc the location to use */ @Requires("loc != null") @Ensures("currentFeatures.size() <= old(currentFeatures.size())") - private void trimCurrentFeaturesToLoc(final GenomeLoc loc) { + public void trimCurrentFeaturesToLoc(final GenomeLoc loc) { final ListIterator it = currentFeatures.listIterator(); while ( it.hasNext() ) { final GATKFeature feature = it.next(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java new file mode 100644 index 000000000..5e884ce53 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedView.java @@ -0,0 +1,184 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.datasources.providers; + +import net.sf.picard.util.PeekableIterator; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.datasources.reads.ReadShard; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.refdata.utils.LocationAwareSeekableRODIterator; +import org.broadinstitute.sting.gatk.refdata.utils.RODRecordList; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +/** + * a ROD view that allows for requests for RODs that overlap intervals on the genome to produce a RefMetaDataTracker + */ +public class IntervalReferenceOrderedView implements ReferenceOrderedView { + /** a list of the RMDDataState (location->iterators) */ + private final List states = new ArrayList<>(1); + + /** + * Used to get genome locs for reads + */ + protected final GenomeLocParser genomeLocParser; + + /** + * The total extent of all reads in this span. We create iterators from our RODs + * from the start of this span, to the end. 
+ */ + private final GenomeLoc shardSpan; + + /** + * Create a new IntervalReferenceOrderedView taking data from provider and capable of + * servicing ROD overlap requests within the genomic interval span + * + * @param provider a ShardDataProvider to give us data + * @param span a GenomeLoc span, or null indicating take the entire genome + */ + public IntervalReferenceOrderedView(final ShardDataProvider provider, final GenomeLoc span) { + if ( provider == null ) throw new IllegalArgumentException("provider cannot be null"); + if ( provider.hasReferenceOrderedData() && span == null ) throw new IllegalArgumentException("span cannot be null when provider has reference ordered data"); + + this.genomeLocParser = provider.getGenomeLocParser(); + this.shardSpan = span; + provider.register(this); + + // conditional to optimize the case where we don't have any ROD data + if ( provider.hasReferenceOrderedData() && ! shardSpan.isUnmapped() ) { + for (final ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData()) + states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan))); + } + } + + /** + * Testing constructor + */ + protected IntervalReferenceOrderedView(final GenomeLocParser genomeLocParser, + final GenomeLoc shardSpan, + final List names, + final List> featureSources) { + this.genomeLocParser = genomeLocParser; + this.shardSpan = shardSpan; + for ( int i = 0; i < names.size(); i++ ) + states.add(new RMDDataState(names.get(i), featureSources.get(i))); + } + + public Collection> getConflictingViews() { + List> classes = new ArrayList<>(); + classes.add(ManagingReferenceOrderedView.class); + return classes; + } + + /** + * Get a RefMetaDataTracker containing bindings for all RODs overlapping the start position of loc + * @param loc a GenomeLoc of size == 1 + * @return a non-null RefMetaDataTracker + */ + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus(GenomeLoc loc) { + if ( loc == null ) throw new 
IllegalArgumentException("loc cannot be null"); + if ( loc.size() != 1 ) throw new IllegalArgumentException("GenomeLoc must have size == 1 but got " + loc); + return getReferenceOrderedDataForInterval(loc); + } + + /** + * Get a RefMetaDataTracker containing bindings for all RODs overlapping interval + * + * @param interval a non-null interval + * @return a non-null RefMetaDataTracker + */ + public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) { + if ( interval == null ) throw new IllegalArgumentException("Interval cannot be null"); + + if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers) + return RefMetaDataTracker.EMPTY_TRACKER; + else { + final List bindings = new ArrayList<>(states.size()); + for ( final RMDDataState state : states ) + bindings.add(state.stream.getOverlapping(interval)); + return new RefMetaDataTracker(bindings); + } + } + + /** + * Trim down all of the ROD managers so that they only hold ROD bindings with start >= startOfDataToKeep.getStart() + * + * @param startOfDataToKeep a non-null genome loc + */ + public void trimCurrentFeaturesToLoc(final GenomeLoc startOfDataToKeep) { + if ( startOfDataToKeep == null ) throw new IllegalArgumentException("startOfDataToKeep cannot be null"); + + for ( final RMDDataState state : states ) + state.stream.trimCurrentFeaturesToLoc(startOfDataToKeep); + } + + /** + * Closes the current view. + */ + public void close() { + for (final RMDDataState state : states) + state.close(); + + // Clear out the existing data so that post-close() accesses to this data will fail-fast. + states.clear(); + } + + /** + * Models the traversal state of a given ROD lane. 
+ */ + private static class RMDDataState { + public final ReferenceOrderedDataSource dataSource; + public final IntervalOverlappingRODsFromStream stream; + private final LocationAwareSeekableRODIterator iterator; + + public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { + this.dataSource = dataSource; + this.iterator = iterator; + this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator<>(iterator)); + } + + /** + * For testing + */ + public RMDDataState(final String name, final PeekableIterator iterator) { + this.dataSource = null; + this.iterator = null; + this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator<>(iterator)); + } + + public void close() { + if ( dataSource != null ) + dataSource.close( iterator ); + } + } +} + diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java index d5b7d0487..b5efbc693 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/LocusReferenceView.java @@ -176,13 +176,13 @@ public class LocusReferenceView extends ReferenceView { /** * Gets the reference context associated with this particular point or extended interval on the genome. - * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beoynd current bounds, it will be trimmed down. + * @param genomeLoc Region for which to retrieve the base(s). If region spans beyond contig end or beyond current bounds, it will be trimmed down. * @return The base at the position represented by this genomeLoc. 
*/ public ReferenceContext getReferenceContext( GenomeLoc genomeLoc ) { //validateLocation( genomeLoc ); - GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), bounds.getContigIndex(), + GenomeLoc window = genomeLocParser.createGenomeLoc( genomeLoc.getContig(), genomeLoc.getContigIndex(), getWindowStart(genomeLoc), getWindowStop(genomeLoc) ); int refStart = -1; diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java index 09b72f5eb..50f2369cb 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ManagingReferenceOrderedView.java @@ -76,7 +76,8 @@ public class ManagingReferenceOrderedView implements ReferenceOrderedView { * @param loc Locus at which to track. * @return A tracker containing information about this locus. */ - public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) { if ( states.isEmpty() ) return RefMetaDataTracker.EMPTY_TRACKER; else { diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java index 52f490972..84e27c953 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedView.java @@ -42,52 +42,9 @@ import java.util.Collection; import java.util.List; /** a ROD view for reads. 
This provides the Read traversals a way of getting a RefMetaDataTracker */ -public class ReadBasedReferenceOrderedView implements View { - // a list of the RMDDataState (location->iterators) - private final List states = new ArrayList(1); - private final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker(); - - /** - * Used to get genome locs for reads - */ - private final GenomeLocParser genomeLocParser; - - /** - * The total extent of all reads in this span. We create iterators from our RODs - * from the start of this span, to the end. - */ - private final GenomeLoc shardSpan; - +public class ReadBasedReferenceOrderedView extends IntervalReferenceOrderedView { public ReadBasedReferenceOrderedView(final ShardDataProvider provider) { - this.genomeLocParser = provider.getGenomeLocParser(); - // conditional to optimize the case where we don't have any ROD data - this.shardSpan = provider.getReferenceOrderedData() != null ? ((ReadShard)provider.getShard()).getReadsSpan() : null; - provider.register(this); - - if ( provider.getReferenceOrderedData() != null && ! shardSpan.isUnmapped() ) { - for (ReferenceOrderedDataSource dataSource : provider.getReferenceOrderedData()) - states.add(new RMDDataState(dataSource, dataSource.seek(shardSpan))); - } - } - - - /** - * Testing constructor - */ - protected ReadBasedReferenceOrderedView(final GenomeLocParser genomeLocParser, - final GenomeLoc shardSpan, - final List names, - final List> featureSources) { - this.genomeLocParser = genomeLocParser; - this.shardSpan = shardSpan; - for ( int i = 0; i < names.size(); i++ ) - states.add(new RMDDataState(names.get(i), featureSources.get(i))); - } - - public Collection> getConflictingViews() { - List> classes = new ArrayList>(); - classes.add(ManagingReferenceOrderedView.class); - return classes; + super(provider, provider.hasReferenceOrderedData() ? 
((ReadShard)provider.getShard()).getReadsSpan() : null); } /** @@ -101,60 +58,11 @@ public class ReadBasedReferenceOrderedView implements View { @Ensures("result != null") public RefMetaDataTracker getReferenceOrderedDataForRead(final SAMRecord rec) { if ( rec.getReadUnmappedFlag() ) - // empty RODs for unmapped reads - return new RefMetaDataTracker(); - else - return getReferenceOrderedDataForInterval(genomeLocParser.createGenomeLoc(rec)); - } - - @Requires({"interval != null", "shardSpan == null || shardSpan.isUnmapped() || shardSpan.containsP(interval)"}) - @Ensures("result != null") - public RefMetaDataTracker getReferenceOrderedDataForInterval(final GenomeLoc interval) { - if ( states.isEmpty() || shardSpan.isUnmapped() ) // optimization for no bindings (common for read walkers) - return EMPTY_TRACKER; + return RefMetaDataTracker.EMPTY_TRACKER; else { - final List bindings = new ArrayList(states.size()); - for ( final RMDDataState state : states ) - bindings.add(state.stream.getOverlapping(interval)); - return new RefMetaDataTracker(bindings); - } - } - - /** - * Closes the current view. - */ - public void close() { - for (final RMDDataState state : states) - state.close(); - - // Clear out the existing data so that post-close() accesses to this data will fail-fast. - states.clear(); - } - - /** Models the traversal state of a given ROD lane. 
*/ - private static class RMDDataState { - public final ReferenceOrderedDataSource dataSource; - public final IntervalOverlappingRODsFromStream stream; - private final LocationAwareSeekableRODIterator iterator; - - public RMDDataState(ReferenceOrderedDataSource dataSource, LocationAwareSeekableRODIterator iterator) { - this.dataSource = dataSource; - this.iterator = iterator; - this.stream = new IntervalOverlappingRODsFromStream(dataSource.getName(), new PeekableIterator(iterator)); - } - - /** - * For testing - */ - public RMDDataState(final String name, final PeekableIterator iterator) { - this.dataSource = null; - this.iterator = null; - this.stream = new IntervalOverlappingRODsFromStream(name, new PeekableIterator(iterator)); - } - - public void close() { - if ( dataSource != null ) - dataSource.close( iterator ); + final GenomeLoc readSpan = genomeLocParser.createGenomeLoc(rec); + trimCurrentFeaturesToLoc(readSpan); + return getReferenceOrderedDataForInterval(readSpan); } } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java index fa83dff82..85c20a6c3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedView.java @@ -25,10 +25,9 @@ package org.broadinstitute.sting.gatk.datasources.providers; -import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.utils.GenomeLoc; public interface ReferenceOrderedView extends View { - RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext refContext ); + RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ); } diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java index 3fb4c7352..1b6c14628 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/providers/RodLocusView.java @@ -98,7 +98,8 @@ public class RodLocusView extends LocusView implements ReferenceOrderedView { rodQueue = new RODMergingIterator(iterators); } - public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) { + @Override + public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc ) { // special case the interval again -- add it into the ROD if ( interval != null ) { allTracksHere.add(interval); } return new RefMetaDataTracker(allTracksHere); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java index 7772dbc1f..dc1b80efd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/ReadShardBalancer.java @@ -177,7 +177,9 @@ public class ReadShardBalancer extends ShardBalancer { currentContigFilePointer = null; List nextContigFilePointers = new ArrayList(); - logger.info("Loading BAM index data for next contig"); + if ( filePointers.hasNext() ) { + logger.info("Loading BAM index data"); + } while ( filePointers.hasNext() ) { @@ -215,8 +217,8 @@ public class ReadShardBalancer extends ShardBalancer { } if ( currentContigFilePointer != null ) { - logger.info("Done loading BAM index data for next contig"); - logger.debug(String.format("Next contig FilePointer: %s", currentContigFilePointer)); + logger.info("Done loading BAM index data"); + logger.debug(String.format("Next FilePointer: 
%s", currentContigFilePointer)); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java index 1223dd2af..a36667ec4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reads/SAMDataSource.java @@ -440,9 +440,8 @@ public class SAMDataSource { * @return Cumulative read metrics. */ public ReadMetrics getCumulativeReadMetrics() { - synchronized(readMetrics) { - return readMetrics.clone(); - } + // don't return a clone here because the engine uses a pointer to this object + return readMetrics; } /** @@ -450,9 +449,7 @@ public class SAMDataSource { * @param readMetrics The 'incremental' read metrics, to be incorporated into the cumulative metrics. */ public void incorporateReadMetrics(final ReadMetrics readMetrics) { - synchronized(this.readMetrics) { - this.readMetrics.incrementMetrics(readMetrics); - } + this.readMetrics.incrementMetrics(readMetrics); } public StingSAMIterator seek(Shard shard) { @@ -548,7 +545,10 @@ public class SAMDataSource { MergingSamRecordIterator mergingIterator = readers.createMergingIterator(iteratorMap); - return applyDecoratingIterators(shard.getReadMetrics(), + // The readMetrics object being passed in should be that of this dataSource and NOT the shard: the dataSource's + // metrics is intended to keep track of the reads seen (and hence passed to the CountingFilteringIterator when + // we apply the decorators), whereas the shard's metrics is used to keep track of the "records" seen. 
+ return applyDecoratingIterators(readMetrics, enableVerification, readProperties.useOriginalBaseQualities(), new ReleasingIterator(readers,StingSAMIteratorAdapter.adapt(mergingIterator)), @@ -625,12 +625,15 @@ public class SAMDataSource { byte defaultBaseQualities, boolean isLocusBasedTraversal ) { - // ************************************************************************************************ // - // * NOTE: ALL FILTERING/DOWNSAMPLING SHOULD BE DONE BEFORE ANY ITERATORS THAT MODIFY THE READS! * // - // * (otherwise we will process something that we may end up throwing away) * // - // ************************************************************************************************ // + // Always apply the ReadFormattingIterator before both ReadFilters and ReadTransformers. At a minimum, + // this will consolidate the cigar strings into canonical form. This has to be done before the read + // filtering, because not all read filters will behave correctly with things like zero-length cigar + // elements. If useOriginalBaseQualities is true or defaultBaseQualities >= 0, this iterator will also + // modify the base qualities. + wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - // Filters: + // Read Filters: these are applied BEFORE downsampling, so that we downsample within the set of reads + // that actually survive filtering. Otherwise we could get much less coverage than requested. 
wrappedIterator = StingSAMIteratorAdapter.adapt(new CountingFilteringIterator(readMetrics,wrappedIterator,supplementalFilters)); // Downsampling: @@ -654,11 +657,8 @@ public class SAMDataSource { if (!noValidationOfReadOrder && enableVerification) wrappedIterator = new VerifyingSamIterator(wrappedIterator); - if (useOriginalBaseQualities || defaultBaseQualities >= 0) - // only wrap if we are replacing the original qualities or using a default base quality - wrappedIterator = new ReadFormattingIterator(wrappedIterator, useOriginalBaseQualities, defaultBaseQualities); - - // set up read transformers + // Read transformers: these are applied last, so that we don't bother transforming reads that get discarded + // by the read filters or downsampler. for ( final ReadTransformer readTransformer : readTransformers ) { if ( readTransformer.enabled() && readTransformer.getApplicationTime() == ReadTransformer.ApplicationTime.ON_INPUT ) wrappedIterator = new ReadTransformingIterator(wrappedIterator, readTransformer); diff --git a/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java b/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java index 01edd44ba..edd3d324c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java +++ b/public/java/src/org/broadinstitute/sting/gatk/datasources/reference/ReferenceDataSource.java @@ -68,8 +68,8 @@ public class ReferenceDataSource { final File indexFile = new File(fastaFile.getAbsolutePath() + ".fai"); // determine the name for the dict file - final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? ".fa" : ".fasta"; - final File dictFile = new File(fastaFile.getAbsolutePath().replace(fastaExt, ".dict")); + final String fastaExt = fastaFile.getAbsolutePath().endsWith("fa") ? 
"\\.fa$" : "\\.fasta$"; + final File dictFile = new File(fastaFile.getAbsolutePath().replaceAll(fastaExt, ".dict")); // It's an error if either the fai or dict file does not exist. The user is now responsible // for creating these files. diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java index 23b16cff2..466ade1ed 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/Downsampler.java @@ -25,19 +25,27 @@ package org.broadinstitute.sting.gatk.downsampling; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; + import java.util.Collection; import java.util.List; /** * The basic downsampler API, with no reads-specific operations. * - * Downsamplers that extend this interface rather than the ReadsDownsampler interface can handle + * Downsamplers that extend this class rather than the ReadsDownsampler class can handle * any kind of item, however they cannot be wrapped within a DownsamplingReadsIterator or a * PerSampleDownsamplingReadsIterator. * * @author David Roazen */ -public interface Downsampler { +public abstract class Downsampler { + + /** + * Number of items discarded by this downsampler since the last call to resetStats() + */ + protected int numDiscardedItems = 0; /** * Submit one item to the downsampler for consideration. Some downsamplers will be able to determine @@ -46,7 +54,7 @@ public interface Downsampler { * * @param item the individual item to submit to the downsampler for consideration */ - public void submit( T item ); + public abstract void submit( final T item ); /** * Submit a collection of items to the downsampler for consideration. 
Should be equivalent to calling @@ -54,21 +62,29 @@ public interface Downsampler { * * @param items the collection of items to submit to the downsampler for consideration */ - public void submit( Collection items ); + public void submit( final Collection items ) { + if ( items == null ) { + throw new IllegalArgumentException("submitted items must not be null"); + } + + for ( final T item : items ) { + submit(item); + } + } /** * Are there items that have survived the downsampling process waiting to be retrieved? * * @return true if this downsampler has > 0 finalized items, otherwise false */ - public boolean hasFinalizedItems(); + public abstract boolean hasFinalizedItems(); /** * Return (and *remove*) all items that have survived downsampling and are waiting to be retrieved. * * @return a list of all finalized items this downsampler contains, or an empty list if there are none */ - public List consumeFinalizedItems(); + public abstract List consumeFinalizedItems(); /** * Are there items stored in this downsampler that it doesn't yet know whether they will @@ -76,7 +92,7 @@ public interface Downsampler { * * @return true if this downsampler has > 0 pending items, otherwise false */ - public boolean hasPendingItems(); + public abstract boolean hasPendingItems(); /** * Peek at the first finalized item stored in this downsampler (or null if there are no finalized items) @@ -84,7 +100,7 @@ public interface Downsampler { * @return the first finalized item in this downsampler (the item is not removed from the downsampler by this call), * or null if there are none */ - public T peekFinalized(); + public abstract T peekFinalized(); /** * Peek at the first pending item stored in this downsampler (or null if there are no pending items) @@ -92,7 +108,7 @@ public interface Downsampler { * @return the first pending item stored in this downsampler (the item is not removed from the downsampler by this call), * or null if there are none */ - public T peekPending(); + public 
abstract T peekPending(); /** * Get the current number of items in this downsampler @@ -103,7 +119,7 @@ public interface Downsampler { * * @return a positive integer */ - public int size(); + public abstract int size(); /** * Returns the number of items discarded (so far) during the downsampling process @@ -111,21 +127,46 @@ public interface Downsampler { * @return the number of items that have been submitted to this downsampler and discarded in the process of * downsampling */ - public int getNumberOfDiscardedItems(); + public int getNumberOfDiscardedItems() { + return numDiscardedItems; + } /** * Used to tell the downsampler that no more items will be submitted to it, and that it should * finalize any pending items. */ - public void signalEndOfInput(); + public abstract void signalEndOfInput(); /** * Empty the downsampler of all finalized/pending items */ - public void clear(); + public abstract void clearItems(); /** * Reset stats in the downsampler such as the number of discarded items *without* clearing the downsampler of items */ - public void reset(); + public void resetStats() { + numDiscardedItems = 0; + } + + /** + * Indicates whether an item should be excluded from elimination during downsampling. By default, + * all items representing reduced reads are excluded from downsampling, but individual downsamplers + * may override if they are able to handle reduced reads correctly. Downsamplers should check + * the return value of this method before discarding an item. + * + * @param item The item to test + * @return true if the item should not be subject to elimination during downsampling, otherwise false + */ + protected boolean doNotDiscardItem( final Object item ) { + // Use getClass() rather than instanceof for performance reasons. Ugly but fast. 
+ if ( item.getClass() == GATKSAMRecord.class ) { + return ((GATKSAMRecord)item).isReducedRead(); + } + else if ( item.getClass() == AlignmentStateMachine.class ) { + return ((AlignmentStateMachine)item).isReducedRead(); + } + + return false; + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java index 5aa27608d..8e92b1ff3 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/DownsamplingMethod.java @@ -61,20 +61,10 @@ public class DownsamplingMethod { public static final DownsampleType DEFAULT_DOWNSAMPLING_TYPE = DownsampleType.BY_SAMPLE; /** - * Default target coverage for locus-based traversals + * Don't allow dcov values below this threshold for locus-based traversals (ie., Locus + * and ActiveRegion walkers), as they can result in problematic downsampling artifacts */ - public static final int DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_COVERAGE = 1000; - - /** - * Default downsampling method for locus-based traversals - */ - public static final DownsamplingMethod DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_METHOD = - new DownsamplingMethod(DEFAULT_DOWNSAMPLING_TYPE, DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_COVERAGE, null); - - /** - * Default downsampling method for read-based traversals - */ - public static final DownsamplingMethod DEFAULT_READ_TRAVERSAL_DOWNSAMPLING_METHOD = NONE; + public static final int MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS = 200; public DownsamplingMethod( DownsampleType type, Integer toCoverage, Double toFraction ) { @@ -118,6 +108,16 @@ public class DownsamplingMethod { if ( isLocusTraversal && type == DownsampleType.ALL_READS && toCoverage != null ) { throw new UserException("Downsampling to coverage with the ALL_READS method for locus-based traversals (eg., LocusWalkers) is not currently supported 
(though it is supported for ReadWalkers)."); } + + // For locus traversals, ensure that the dcov value (if present) is not problematically low + if ( isLocusTraversal && type != DownsampleType.NONE && toCoverage != null && + toCoverage < MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS ) { + throw new UserException(String.format("Locus-based traversals (ie., Locus and ActiveRegion walkers) require " + + "a minimum -dcov value of %d when downsampling to coverage. Values less " + + "than this can produce problematic downsampling artifacts while providing " + + "only insignificant improvements in memory usage in most cases.", + MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS)); + } } public String toString() { @@ -139,13 +139,4 @@ public class DownsamplingMethod { return builder.toString(); } - - public static DownsamplingMethod getDefaultDownsamplingMethod( Walker walker ) { - if ( walker instanceof LocusWalker || walker instanceof ActiveRegionWalker ) { - return DEFAULT_LOCUS_TRAVERSAL_DOWNSAMPLING_METHOD; - } - else { - return DEFAULT_READ_TRAVERSAL_DOWNSAMPLING_METHOD; - } - } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java index 1cede9c33..c40f8019e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/FractionalDownsampler.java @@ -30,7 +30,6 @@ import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; /** @@ -41,13 +40,11 @@ import java.util.List; * * @author David Roazen */ -public class FractionalDownsampler implements ReadsDownsampler { +public class FractionalDownsampler extends ReadsDownsampler { private ArrayList selectedReads; - private int 
cutoffForInclusion; - - private int numDiscardedItems; + private final int cutoffForInclusion; private static final int RANDOM_POOL_SIZE = 10000; @@ -57,18 +54,19 @@ public class FractionalDownsampler implements ReadsDownsamp * @param fraction Fraction of reads to preserve, between 0.0 (inclusive) and 1.0 (inclusive). * Actual number of reads preserved may differ randomly. */ - public FractionalDownsampler( double fraction ) { + public FractionalDownsampler( final double fraction ) { if ( fraction < 0.0 || fraction > 1.0 ) { throw new ReviewedStingException("Fraction of reads to include must be between 0.0 and 1.0, inclusive"); } cutoffForInclusion = (int)(fraction * RANDOM_POOL_SIZE); - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T newRead ) { - if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion ) { + @Override + public void submit( final T newRead ) { + if ( GenomeAnalysisEngine.getRandomGenerator().nextInt(10000) < cutoffForInclusion || doNotDiscardItem(newRead) ) { selectedReads.add(newRead); } else { @@ -76,61 +74,56 @@ public class FractionalDownsampler implements ReadsDownsamp } } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { return selectedReads.size() > 0; } + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed List downsampledItems = selectedReads; - clear(); + clearItems(); return downsampledItems; } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { return selectedReads.isEmpty() ? 
null : selectedReads.get(0); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { return selectedReads.size(); } + @Override public void signalEndOfInput() { // NO-OP } - public void clear() { + @Override + public void clearItems() { selectedReads = new ArrayList(); } - public void reset() { - numDiscardedItems = 0; - } - + @Override public boolean requiresCoordinateSortOrder() { return false; } - public void signalNoMoreReadsBefore( T read ) { + @Override + public void signalNoMoreReadsBefore( final T read ) { // NO-OP } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java index 4ff729537..3ce4d09d6 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/LevelingDownsampler.java @@ -46,16 +46,15 @@ import java.util.*; * * @author David Roazen */ -public class LevelingDownsampler, E> implements Downsampler { +public class LevelingDownsampler, E> extends Downsampler { private final int minElementsPerStack; + private final int targetSize; private List groups; private boolean groupsAreFinalized; - private int numDiscardedItems; - /** * Construct a LevelingDownsampler * @@ -65,7 +64,7 @@ public class LevelingDownsampler, E> implements Downsampler * this value -- if it does, items are removed from Lists evenly until the total size * is <= this value */ - public LevelingDownsampler( int targetSize ) { + public LevelingDownsampler( final int targetSize ) { this(targetSize, 1); } @@ -79,55 +78,58 @@ public class LevelingDownsampler, E> implements Downsampler * if a stack has only 3 elements and minElementsPerStack is 3, no matter what * we'll not reduce this stack below 3. 
*/ - public LevelingDownsampler(final int targetSize, final int minElementsPerStack) { + public LevelingDownsampler( final int targetSize, final int minElementsPerStack ) { if ( targetSize < 0 ) throw new IllegalArgumentException("targetSize must be >= 0 but got " + targetSize); if ( minElementsPerStack < 0 ) throw new IllegalArgumentException("minElementsPerStack must be >= 0 but got " + minElementsPerStack); this.targetSize = targetSize; this.minElementsPerStack = minElementsPerStack; - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T item ) { + @Override + public void submit( final T item ) { groups.add(item); } - public void submit( Collection items ){ + @Override + public void submit( final Collection items ){ groups.addAll(items); } + @Override public boolean hasFinalizedItems() { return groupsAreFinalized && groups.size() > 0; } + @Override public List consumeFinalizedItems() { if ( ! hasFinalizedItems() ) { return new ArrayList(); } // pass by reference rather than make a copy, for speed - List toReturn = groups; - clear(); + final List toReturn = groups; + clearItems(); return toReturn; } + @Override public boolean hasPendingItems() { return ! groupsAreFinalized && groups.size() > 0; } + @Override public T peekFinalized() { return hasFinalizedItems() ? groups.get(0) : null; } + @Override public T peekPending() { return hasPendingItems() ? 
groups.get(0) : null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { int s = 0; @@ -137,26 +139,24 @@ public class LevelingDownsampler, E> implements Downsampler return s; } + @Override public void signalEndOfInput() { levelGroups(); groupsAreFinalized = true; } - public void clear() { + @Override + public void clearItems() { groups = new ArrayList(); groupsAreFinalized = false; } - public void reset() { - numDiscardedItems = 0; - } - private void levelGroups() { + final int[] groupSizes = new int[groups.size()]; int totalSize = 0; - int[] groupSizes = new int[groups.size()]; int currentGroupIndex = 0; - for ( T group : groups ) { + for ( final T group : groups ) { groupSizes[currentGroupIndex] = group.size(); totalSize += groupSizes[currentGroupIndex]; currentGroupIndex++; @@ -191,20 +191,18 @@ public class LevelingDownsampler, E> implements Downsampler // Now we actually go through and reduce each group to its new count as specified in groupSizes currentGroupIndex = 0; - for ( T group : groups ) { + for ( final T group : groups ) { downsampleOneGroup(group, groupSizes[currentGroupIndex]); currentGroupIndex++; } } - private void downsampleOneGroup( T group, int numItemsToKeep ) { + private void downsampleOneGroup( final T group, final int numItemsToKeep ) { if ( numItemsToKeep >= group.size() ) { return; } - numDiscardedItems += group.size() - numItemsToKeep; - - BitSet itemsToKeep = new BitSet(group.size()); + final BitSet itemsToKeep = new BitSet(group.size()); for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(group.size(), numItemsToKeep) ) { itemsToKeep.set(selectedIndex); } @@ -213,12 +211,13 @@ public class LevelingDownsampler, E> implements Downsampler // If our group is a linked list, we can remove the desired items in a single O(n) pass with an iterator if ( group instanceof LinkedList ) { - Iterator iter = group.iterator(); + final Iterator iter = group.iterator(); while 
( iter.hasNext() ) { - iter.next(); + final E item = iter.next(); - if ( ! itemsToKeep.get(currentIndex) ) { + if ( ! itemsToKeep.get(currentIndex) && ! doNotDiscardItem(item) ) { iter.remove(); + numDiscardedItems++; } currentIndex++; @@ -227,14 +226,15 @@ public class LevelingDownsampler, E> implements Downsampler // If it's not a linked list, it's more efficient to copy the desired items into a new list and back rather // than suffer O(n^2) of item shifting else { - List keptItems = new ArrayList(numItemsToKeep); + final List keptItems = new ArrayList(group.size()); - for ( E item : group ) { - if ( itemsToKeep.get(currentIndex) ) { + for ( final E item : group ) { + if ( itemsToKeep.get(currentIndex) || doNotDiscardItem(item) ) { keptItems.add(item); } currentIndex++; } + numDiscardedItems += group.size() - keptItems.size(); group.clear(); group.addAll(keptItems); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java index 3aaed6c73..1eabf5038 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/PassThroughDownsampler.java @@ -27,7 +27,6 @@ package org.broadinstitute.sting.gatk.downsampling; import net.sf.samtools.SAMRecord; -import java.util.Collection; import java.util.LinkedList; import java.util.List; @@ -39,25 +38,21 @@ import java.util.List; * * @author David Roazen */ -public class PassThroughDownsampler implements ReadsDownsampler { +public class PassThroughDownsampler extends ReadsDownsampler { private LinkedList selectedReads; public PassThroughDownsampler() { - clear(); + clearItems(); } + @Override public void submit( T newRead ) { // All reads pass-through, no reads get downsampled selectedReads.add(newRead); } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - 
} - } - + @Override public boolean hasFinalizedItems() { return ! selectedReads.isEmpty(); } @@ -66,50 +61,50 @@ public class PassThroughDownsampler implements ReadsDownsam * Note that this list is a linked list and so doesn't support fast random access * @return */ + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed - List downsampledItems = selectedReads; - clear(); + final List downsampledItems = selectedReads; + clearItems(); return downsampledItems; } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { return selectedReads.isEmpty() ? null : selectedReads.getFirst(); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return 0; - } - @Override public int size() { return selectedReads.size(); } + @Override public void signalEndOfInput() { // NO-OP } - public void clear() { + @Override + public void clearItems() { selectedReads = new LinkedList(); } - public void reset() { - // NO-OP - } - + @Override public boolean requiresCoordinateSortOrder() { return false; } + @Override public void signalNoMoreReadsBefore( T read ) { // NO-OP } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java index a878d7553..a8df014e5 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReadsDownsampler.java @@ -32,14 +32,14 @@ import net.sf.samtools.SAMRecord; * * @author David Roazen */ -public interface ReadsDownsampler extends Downsampler { +public abstract class ReadsDownsampler extends Downsampler { /** * Does this downsampler require that reads be fed to it in coordinate order? 
* * @return true if reads must be submitted to this downsampler in coordinate order, otherwise false */ - public boolean requiresCoordinateSortOrder(); + public abstract boolean requiresCoordinateSortOrder(); /** * Tell this downsampler that no more reads located before the provided read (according to @@ -52,5 +52,5 @@ public interface ReadsDownsampler extends Downsampler { * @param read the downsampler will assume that no reads located before this read will ever * be submitted to it in the future */ - public void signalNoMoreReadsBefore( T read ); + public abstract void signalNoMoreReadsBefore( final T read ); } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java index 0e6bbfcb6..ff085d17b 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsampler.java @@ -39,7 +39,12 @@ import java.util.*; * * @author David Roazen */ -public class ReservoirDownsampler implements ReadsDownsampler { +public class ReservoirDownsampler extends ReadsDownsampler { + + /** + * size of our reservoir -- ie., the maximum number of reads from the stream that will be retained + * (not including any undiscardable items) + */ private final int targetSampleSize; /** @@ -58,17 +63,33 @@ public class ReservoirDownsampler implements ReadsDownsampl */ private List reservoir; + /** + * Certain items (eg., reduced reads) cannot be discarded at all during downsampling. We store + * these items separately so as not to impact the fair selection of items for inclusion in the + * reservoir. These items are returned (and cleared) along with any items in the reservoir in + * calls to consumeFinalizedItems(). + */ + private List undiscardableItems; + + /** + * Are we currently using a linked list for the reservoir? 
+ */ private boolean isLinkedList; - private int totalReadsSeen; + /** + * Count of the number of reads seen that were actually eligible for discarding. Used by the reservoir downsampling + * algorithm to ensure that all discardable reads have an equal chance of making it into the reservoir. + */ + private int totalDiscardableReadsSeen; - private int numDiscardedItems; /** * Construct a ReservoirDownsampler * * @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained - * after downsampling will be min(totalReads, targetSampleSize) + * after downsampling will be min(totalDiscardableReads, targetSampleSize) + any + * undiscardable reads (eg., reduced reads). + * * @param expectFewOverflows if true, this downsampler will be optimized for the case * where most of the time we won't fill up anything like the * targetSampleSize elements. If this is false, we will allocate @@ -76,15 +97,15 @@ public class ReservoirDownsampler implements ReadsDownsampl * the cost of allocation if we often use targetSampleSize or more * elements. */ - public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows) { + public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows ) { if ( targetSampleSize <= 0 ) { throw new ReviewedStingException("Cannot do reservoir downsampling with a sample size <= 0"); } this.targetSampleSize = targetSampleSize; this.expectFewOverflows = expectFewOverflows; - clear(); - reset(); + clearItems(); + resetStats(); } /** @@ -93,15 +114,21 @@ public class ReservoirDownsampler implements ReadsDownsampl * @param targetSampleSize Size of the reservoir used by this downsampler. 
Number of items retained * after downsampling will be min(totalReads, targetSampleSize) */ - public ReservoirDownsampler ( int targetSampleSize ) { + public ReservoirDownsampler ( final int targetSampleSize ) { this(targetSampleSize, false); } + @Override + public void submit ( final T newRead ) { + if ( doNotDiscardItem(newRead) ) { + undiscardableItems.add(newRead); + return; + } - public void submit ( T newRead ) { - totalReadsSeen++; + // Only count reads that are actually eligible for discarding for the purposes of the reservoir downsampling algorithm + totalDiscardableReadsSeen++; - if ( totalReadsSeen <= targetSampleSize ) { + if ( totalDiscardableReadsSeen <= targetSampleSize ) { reservoir.add(newRead); } else { @@ -110,7 +137,7 @@ public class ReservoirDownsampler implements ReadsDownsampl isLinkedList = false; } - final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen); + final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalDiscardableReadsSeen); if ( randomSlot < targetSampleSize ) { reservoir.set(randomSlot, newRead); } @@ -118,49 +145,46 @@ public class ReservoirDownsampler implements ReadsDownsampl } } - public void submit ( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { - return reservoir.size() > 0; + return ! reservoir.isEmpty() || ! undiscardableItems.isEmpty(); } + @Override public List consumeFinalizedItems() { - if ( reservoir.isEmpty() ) { - // if there's nothing here, don't both allocating a new list completely + if ( ! 
hasFinalizedItems() ) { + // if there's nothing here, don't bother allocating a new list return Collections.emptyList(); } else { - // pass by reference rather than make a copy, for speed - List downsampledItems = reservoir; - clear(); + // pass reservoir by reference rather than make a copy, for speed + final List downsampledItems = reservoir; + downsampledItems.addAll(undiscardableItems); + clearItems(); return downsampledItems; } } + @Override public boolean hasPendingItems() { return false; } + @Override public T peekFinalized() { - return reservoir.isEmpty() ? null : reservoir.get(0); + return ! reservoir.isEmpty() ? reservoir.get(0) : (! undiscardableItems.isEmpty() ? undiscardableItems.get(0) : null); } + @Override public T peekPending() { return null; } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; + @Override + public int size() { + return reservoir.size() + undiscardableItems.size(); } @Override - public int size() { - return reservoir.size(); - } - public void signalEndOfInput() { // NO-OP } @@ -168,25 +192,27 @@ public class ReservoirDownsampler implements ReadsDownsampl /** * Clear the data structures used to hold information */ - public void clear() { + @Override + public void clearItems() { // if we aren't expecting many overflows, allocate a linked list not an arraylist reservoir = expectFewOverflows ? 
new LinkedList() : new ArrayList(targetSampleSize); + // there's no possibility of overflow with the undiscardable items, so we always use a linked list for them + undiscardableItems = new LinkedList<>(); + // it's a linked list if we allocate one isLinkedList = expectFewOverflows; - // an internal stat used by the downsampling process, so not cleared by reset() below - totalReadsSeen = 0; - } - - public void reset() { - numDiscardedItems = 0; + // an internal stat used by the downsampling process, so not cleared by resetStats() below + totalDiscardableReadsSeen = 0; } + @Override public boolean requiresCoordinateSortOrder() { return false; } + @Override public void signalNoMoreReadsBefore( T read ) { // NO-OP } diff --git a/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java b/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java index 7c6c043c2..897e2c05e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsampler.java @@ -35,11 +35,11 @@ import java.util.*; * * @author David Roazen */ -public class SimplePositionalDownsampler implements ReadsDownsampler { +public class SimplePositionalDownsampler extends ReadsDownsampler { - private int targetCoverage; + private final int targetCoverage; - private ReservoirDownsampler reservoir; + private final ReservoirDownsampler reservoir; private int currentContigIndex; @@ -51,97 +51,93 @@ public class SimplePositionalDownsampler implements ReadsDo private ArrayList finalizedReads; - private int numDiscardedItems; /** * Construct a SimplePositionalDownsampler * * @param targetCoverage Maximum number of reads that may share any given alignment start position */ - public SimplePositionalDownsampler( int targetCoverage ) { + public SimplePositionalDownsampler( final int targetCoverage ) { this.targetCoverage = 
targetCoverage; reservoir = new ReservoirDownsampler(targetCoverage); finalizedReads = new ArrayList(); - clear(); - reset(); + clearItems(); + resetStats(); } - public void submit( T newRead ) { + @Override + public void submit( final T newRead ) { updatePositionalState(newRead); if ( unmappedReadsReached ) { // don't downsample the unmapped reads at the end of the stream finalizedReads.add(newRead); } else { - int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems(); + final int reservoirPreviouslyDiscardedItems = reservoir.getNumberOfDiscardedItems(); + // our reservoir downsampler will call doNotDiscardItem() for us to exclude items from elimination as appropriate reservoir.submit(newRead); numDiscardedItems += reservoir.getNumberOfDiscardedItems() - reservoirPreviouslyDiscardedItems; } } - public void submit( Collection newReads ) { - for ( T read : newReads ) { - submit(read); - } - } - + @Override public boolean hasFinalizedItems() { return finalizedReads.size() > 0; } + @Override public List consumeFinalizedItems() { // pass by reference rather than make a copy, for speed - List toReturn = finalizedReads; + final List toReturn = finalizedReads; finalizedReads = new ArrayList(); return toReturn; } + @Override public boolean hasPendingItems() { return reservoir.hasFinalizedItems(); } + @Override public T peekFinalized() { return finalizedReads.isEmpty() ? 
null : finalizedReads.get(0); } + @Override public T peekPending() { return reservoir.peekFinalized(); } - public int getNumberOfDiscardedItems() { - return numDiscardedItems; - } - @Override public int size() { return finalizedReads.size() + reservoir.size(); } + @Override public void signalEndOfInput() { finalizeReservoir(); } - public void clear() { - reservoir.clear(); - reservoir.reset(); + @Override + public void clearItems() { + reservoir.clearItems(); + reservoir.resetStats(); finalizedReads.clear(); positionEstablished = false; unmappedReadsReached = false; } - public void reset() { - numDiscardedItems = 0; - } - + @Override public boolean requiresCoordinateSortOrder() { return true; } - public void signalNoMoreReadsBefore( T read ) { + @Override + public void signalNoMoreReadsBefore( final T read ) { updatePositionalState(read); } - private void updatePositionalState( T newRead ) { + private void updatePositionalState( final T newRead ) { if ( readIsPastCurrentPosition(newRead) ) { if ( reservoir.hasFinalizedItems() ) { finalizeReservoir(); @@ -155,13 +151,13 @@ public class SimplePositionalDownsampler implements ReadsDo } } - private void setCurrentPosition( T read ) { + private void setCurrentPosition( final T read ) { currentContigIndex = read.getReferenceIndex(); currentAlignmentStart = read.getAlignmentStart(); positionEstablished = true; } - private boolean readIsPastCurrentPosition( T read ) { + private boolean readIsPastCurrentPosition( final T read ) { return ! 
positionEstablished || read.getReferenceIndex() > currentContigIndex || read.getAlignmentStart() > currentAlignmentStart || @@ -170,6 +166,6 @@ public class SimplePositionalDownsampler implements ReadsDo private void finalizeReservoir() { finalizedReads.addAll(reservoir.consumeFinalizedItems()); - reservoir.reset(); + reservoir.resetStats(); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java index 415049228..dc46849df 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/LinearMicroScheduler.java @@ -37,7 +37,6 @@ import org.broadinstitute.sting.gatk.io.DirectOutputTracker; import org.broadinstitute.sting.gatk.io.OutputTracker; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; import org.broadinstitute.sting.gatk.traversals.TraversalEngine; -import org.broadinstitute.sting.gatk.traversals.TraverseActiveRegions; import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.threading.ThreadEfficiencyMonitor; @@ -114,12 +113,6 @@ public class LinearMicroScheduler extends MicroScheduler { done = walker.isDone(); } - // Special function call to empty out the work queue. 
Ugly for now but will be cleaned up when we eventually push this functionality more into the engine - if( traversalEngine instanceof TraverseActiveRegions) { - final Object result = ((TraverseActiveRegions) traversalEngine).endTraversal(walker, accumulator.getReduceInit()); - accumulator.accumulate(null, result); // Assumes only used with StandardAccumulator - } - Object result = accumulator.finishTraversal(); outputTracker.close(); diff --git a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java index 4ffdc88d8..7077db49c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java +++ b/public/java/src/org/broadinstitute/sting/gatk/executive/MicroScheduler.java @@ -52,7 +52,6 @@ import javax.management.ObjectName; import java.io.File; import java.lang.management.ManagementFactory; import java.util.*; -import java.util.concurrent.TimeUnit; /** @@ -368,7 +367,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean { for ( final long countsByFilter : cumulativeMetrics.getCountsByFilter().values()) nSkippedReads += countsByFilter; - logger.info(String.format("%d reads were filtered out during traversal out of %d total (%.2f%%)", + logger.info(String.format("%d reads were filtered out during the traversal out of approximately %d total reads (%.2f%%)", nSkippedReads, cumulativeMetrics.getNumReadsSeen(), 100.0 * MathUtils.ratio(nSkippedReads, cumulativeMetrics.getNumReadsSeen()))); diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java index 3e50632d9..1942fc19a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/CountingFilteringIterator.java @@ -1,28 +1,28 @@ -/* -* Copyright (c) 2012 The 
Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. -* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ - +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + package org.broadinstitute.sting.gatk.filters; import net.sf.picard.filter.SamRecordFilter; @@ -31,9 +31,7 @@ import net.sf.samtools.util.CloseableIterator; import net.sf.samtools.util.CloserUtil; import org.broadinstitute.sting.gatk.ReadMetrics; -import java.util.Collection; -import java.util.Iterator; -import java.util.NoSuchElementException; +import java.util.*; /** * Filtering Iterator which takes a filter and an iterator and iterates @@ -41,11 +39,30 @@ import java.util.NoSuchElementException; * @author Mark DePristo */ public class CountingFilteringIterator implements CloseableIterator { - private final ReadMetrics runtimeMetrics; + private final ReadMetrics globalRuntimeMetrics; + private final ReadMetrics privateRuntimeMetrics; private final Iterator iterator; - private final Collection filters; + private final List filters = new ArrayList<>(); private SAMRecord next = null; + // wrapper around ReadFilters to count the number of filtered reads + private final class CountingReadFilter extends ReadFilter { + protected final ReadFilter readFilter; + protected long counter = 0L; + + public CountingReadFilter(final ReadFilter readFilter) { + this.readFilter = readFilter; + } + + @Override + public boolean filterOut(final SAMRecord record) { + final boolean result = readFilter.filterOut(record); + if ( result ) + counter++; + return result; + } + } + /** * Constructor * @@ -54,9 +71,11 @@ public class CountingFilteringIterator implements CloseableIterator { * @param filters the filter (which may be a FilterAggregator) */ public CountingFilteringIterator(ReadMetrics metrics, Iterator iterator, Collection filters) { - this.runtimeMetrics = metrics; + this.globalRuntimeMetrics = metrics; + 
privateRuntimeMetrics = new ReadMetrics(); this.iterator = iterator; - this.filters = filters; + for ( final ReadFilter filter : filters ) + this.filters.add(new CountingReadFilter(filter)); next = getNextRecord(); } @@ -95,6 +114,11 @@ public class CountingFilteringIterator implements CloseableIterator { public void close() { CloserUtil.close(iterator); + + // update the global metrics with all the data we collected here + globalRuntimeMetrics.incrementMetrics(privateRuntimeMetrics); + for ( final CountingReadFilter filter : filters ) + globalRuntimeMetrics.setFilterCount(filter.readFilter.getClass().getSimpleName(), filter.counter); } /** @@ -105,12 +129,14 @@ public class CountingFilteringIterator implements CloseableIterator { private SAMRecord getNextRecord() { while (iterator.hasNext()) { SAMRecord record = iterator.next(); - runtimeMetrics.incrementNumReadsSeen(); + + // update only the private copy of the metrics so that we don't need to worry about race conditions + // that can arise when trying to update the global copy; it was agreed that this is the cleanest solution. 
+ privateRuntimeMetrics.incrementNumReadsSeen(); boolean filtered = false; for(SamRecordFilter filter: filters) { if(filter.filterOut(record)) { - runtimeMetrics.incrementFilter(filter); filtered = true; break; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java index f7d1d0297..3167ba139 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java +++ b/public/java/src/org/broadinstitute/sting/gatk/filters/MalformedReadFilter.java @@ -25,14 +25,16 @@ package org.broadinstitute.sting.gatk.filters; -import net.sf.samtools.SAMFileHeader; -import net.sf.samtools.SAMRecord; -import net.sf.samtools.SAMSequenceRecord; -import net.sf.samtools.SAMTagUtil; +import net.sf.samtools.*; import org.broadinstitute.sting.commandline.Argument; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.ReadProperties; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; import org.broadinstitute.sting.utils.exceptions.UserException; +import java.util.Collections; + /** * Filter out malformed reads. 
* @@ -40,20 +42,46 @@ import org.broadinstitute.sting.utils.exceptions.UserException; * @version 0.1 */ public class MalformedReadFilter extends ReadFilter { + + + private static final String FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME = "filter_reads_with_N_cigar" ; + private SAMFileHeader header; + @Argument(fullName = FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME, shortName = "filterRNC", doc = "filter out reads with CIGAR containing the N operator, instead of stopping processing and reporting an error.", required = false) + boolean filterReadsWithNCigar = false; + + @Argument(fullName = "filter_mismatching_base_and_quals", shortName = "filterMBQ", doc = "if a read has mismatching number of bases and base qualities, filter out the read instead of blowing up.", required = false) boolean filterMismatchingBaseAndQuals = false; @Argument(fullName = "filter_bases_not_stored", shortName = "filterNoBases", doc = "if a read has no stored bases (i.e. a '*'), filter out the read instead of blowing up.", required = false) boolean filterBasesNotStored = false; + /** + * Indicates the applicable validation exclusions + */ + private boolean allowNCigars; + @Override - public void initialize(GenomeAnalysisEngine engine) { - this.header = engine.getSAMFileHeader(); + public void initialize(final GenomeAnalysisEngine engine) { + header = engine.getSAMFileHeader(); + ValidationExclusion validationExclusions = null; + final SAMDataSource rds = engine.getReadsDataSource(); + if (rds != null) { + final ReadProperties rps = rds.getReadsInfo(); + if (rps != null) { + validationExclusions = rps.getValidationExclusionList(); + } + } + if (validationExclusions == null) { + allowNCigars = false; + } else { + allowNCigars = validationExclusions.contains(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS); + } } - public boolean filterOut(SAMRecord read) { + public boolean filterOut(final SAMRecord read) { // slowly changing the behavior to blow up first and filtering out if a parameter is explicitly
provided return !checkInvalidAlignmentStart(read) || !checkInvalidAlignmentEnd(read) || @@ -61,7 +89,8 @@ public class MalformedReadFilter extends ReadFilter { !checkHasReadGroup(read) || !checkMismatchingBasesAndQuals(read, filterMismatchingBaseAndQuals) || !checkCigarDisagreesWithAlignment(read) || - !checkSeqStored(read, filterBasesNotStored); + !checkSeqStored(read, filterBasesNotStored) || + !checkCigarIsSupported(read,filterReadsWithNCigar,allowNCigars); } private static boolean checkHasReadGroup(final SAMRecord read) { @@ -80,7 +109,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if read start is valid, false otherwise. */ - private static boolean checkInvalidAlignmentStart( SAMRecord read ) { + private static boolean checkInvalidAlignmentStart(final SAMRecord read ) { // read is not flagged as 'unmapped', but alignment start is NO_ALIGNMENT_START if( !read.getReadUnmappedFlag() && read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START ) return false; @@ -95,7 +124,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if read end is valid, false otherwise. */ - private static boolean checkInvalidAlignmentEnd( SAMRecord read ) { + private static boolean checkInvalidAlignmentEnd(final SAMRecord read ) { // Alignment aligns to negative number of bases in the reference. if( !read.getReadUnmappedFlag() && read.getAlignmentEnd() != -1 && (read.getAlignmentEnd()-read.getAlignmentStart()+1)<0 ) return false; @@ -108,11 +137,11 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to verify. * @return true if alignment agrees with header, false othrewise. 
*/ - private static boolean checkAlignmentDisagreesWithHeader( SAMFileHeader header, SAMRecord read ) { + private static boolean checkAlignmentDisagreesWithHeader(final SAMFileHeader header, final SAMRecord read ) { // Read is aligned to nonexistent contig if( read.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && read.getAlignmentStart() != SAMRecord.NO_ALIGNMENT_START ) return false; - SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() ); + final SAMSequenceRecord contigHeader = header.getSequence( read.getReferenceIndex() ); // Read is aligned to a point after the end of the contig if( !read.getReadUnmappedFlag() && read.getAlignmentStart() > contigHeader.getSequenceLength() ) return false; @@ -124,7 +153,7 @@ public class MalformedReadFilter extends ReadFilter { * @param read The read to validate. * @return true if cigar agrees with alignment, false otherwise. */ - private static boolean checkCigarDisagreesWithAlignment(SAMRecord read) { + private static boolean checkCigarDisagreesWithAlignment(final SAMRecord read) { // Read has a valid alignment start, but the CIGAR string is empty if( !read.getReadUnmappedFlag() && read.getAlignmentStart() != -1 && @@ -134,19 +163,81 @@ public class MalformedReadFilter extends ReadFilter { return true; } + /** + * Check for unsupported CIGAR operators. + * Currently the N operator is not supported. + * @param read The read to validate. + * @param filterReadsWithNCigar whether the offending read should just + * be silently filtered or not. + * @param allowNCigars whether reads that contain N operators in their CIGARs + * can be processed or an exception should be thrown instead. + * @throws UserException.UnsupportedCigarOperatorException + * if {@link #filterReadsWithNCigar} is false and + * the input read has some unsupported operation. + * @return true if the read CIGAR operations are + * fully supported, otherwise false, as long as + * no exception has been thrown. 
+ */ + private static boolean checkCigarIsSupported(final SAMRecord read, final boolean filterReadsWithNCigar, final boolean allowNCigars) { + if( containsNOperator(read)) { + if (! filterReadsWithNCigar && !allowNCigars) { + throw new UserException.UnsupportedCigarOperatorException( + CigarOperator.N,read, + "Perhaps you are" + + " trying to use RNA-Seq data?" + + " While we are currently actively working to" + + " support this data type unfortunately the" + + " GATK cannot be used with this data in its" + + " current form. You have the option of either" + + " filtering out all reads with operator " + + CigarOperator.N + " in their CIGAR string" + + " (please add --" + + FILTER_READS_WITH_N_CIGAR_ARGUMENT_FULL_NAME + + " to your command line) or" + + " assume the risk of processing those reads as they" + + " are including the pertinent unsafe flag (please add -U" + + ' ' + ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS + + " to your command line). Notice however that if you were" + + " to choose the latter, an unspecified subset of the" + + " analytical outputs of an unspecified subset of the tools" + + " will become unpredictable. Consequently the GATK team" + + " might well not be able to provide you with the usual support" + + " with any issue regarding any output"); + } + return ! filterReadsWithNCigar; + } + return true; + } + + private static boolean containsNOperator(final SAMRecord read) { + final Cigar cigar = read.getCigar(); + if (cigar == null) { + return false; + } + for (final CigarElement ce : cigar.getCigarElements()) { + if (ce.getOperator() == CigarOperator.N) { + return true; + } + } + return false; + } + /** * Check if the read has the same number of bases and base qualities * @param read the read to validate * @return true if they have the same number. False otherwise. 
*/ - private static boolean checkMismatchingBasesAndQuals(SAMRecord read, boolean filterMismatchingBaseAndQuals) { - boolean result; + private static boolean checkMismatchingBasesAndQuals(final SAMRecord read, final boolean filterMismatchingBaseAndQuals) { + final boolean result; if (read.getReadLength() == read.getBaseQualities().length) result = true; else if (filterMismatchingBaseAndQuals) result = false; else - throw new UserException.MalformedBAM(read, String.format("BAM file has a read with mismatching number of bases and base qualities. Offender: %s [%d bases] [%d quals]", read.getReadName(), read.getReadLength(), read.getBaseQualities().length)); + throw new UserException.MalformedBAM(read, + String.format("BAM file has a read with mismatching number of bases and base qualities. Offender: %s [%d bases] [%d quals].%s", + read.getReadName(), read.getReadLength(), read.getBaseQualities().length, + read.getBaseQualities().length == 0 ? " You can use --defaultBaseQualities to assign a default base quality for all reads, but this can be dangerous if you don't know what you are doing." : "")); return result; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java b/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java index 84709d6d8..80841bae7 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java +++ b/public/java/src/org/broadinstitute/sting/gatk/io/storage/VariantContextWriterStorage.java @@ -67,12 +67,16 @@ public class VariantContextWriterStorage implements Storage, Var if ( header.isWriteEngineHeaders() ) { // skip writing the command line header if requested if ( ! skipWritingCommandLineHeader && header.isWriteCommandLine() ) { - // Check for the command-line argument header line. If not present, add it in.
- final VCFHeaderLine commandLineArgHeaderLine = getCommandLineArgumentHeaderLine(); - final boolean foundCommandLineHeaderLine = vcfHeader.getMetaDataLine(commandLineArgHeaderLine.getKey()) != null; - if ( ! foundCommandLineHeaderLine ) - vcfHeader.addMetaDataLine(commandLineArgHeaderLine); + // Always add the header line, as the current format allows multiple entries + final VCFHeaderLine commandLineArgHeaderLine = GATKVCFUtils.getCommandLineArgumentHeaderLine(engine, argumentSources); + vcfHeader.addMetaDataLine(commandLineArgHeaderLine); } if ( UPDATE_CONTIG_HEADERS ) @@ -275,13 +271,4 @@ public class VariantContextWriterStub implements Stub, Var getOutputFile() != null && // that are going to disk engine.getArguments().generateShadowBCF; // and we actually want to do it } - - /** - * Gets the appropriately formatted header for a VCF file - * @return VCF file header. - */ - private VCFHeaderLine getCommandLineArgumentHeaderLine() { - CommandLineExecutable executable = JVMUtils.getObjectOfType(argumentSources,CommandLineExecutable.class); - return new VCFHeaderLine(executable.getAnalysisName(), "\"" + engine.createApproximateCommandLineArgumentString(argumentSources.toArray()) + "\""); - } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java b/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java index c3b4aaa0a..f9d2f4802 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/iterators/ReadFormattingIterator.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.iterators; import net.sf.samtools.SAMRecord; import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; /** * An iterator which does post-processing of a read, including potentially wrapping @@ -104,6 +105,10 @@ public class ReadFormattingIterator implements StingSAMIterator { public SAMRecord 
next() { SAMRecord rec = wrappedIterator.next(); + // Always consolidate the cigar string into canonical form, collapsing zero-length / repeated cigar elements. + // Downstream code (like LocusIteratorByState) cannot necessarily handle non-consolidated cigar strings. + rec.setCigar(AlignmentUtils.consolidateCigar(rec.getCigar())); + // if we are using default quals, check if we need them, and add if necessary. // 1. we need if reads are lacking or have incomplete quality scores // 2. we add if defaultBaseQualities has a positive value diff --git a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java index de84809bd..67d72189c 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java +++ b/public/java/src/org/broadinstitute/sting/gatk/phonehome/GATKRunReport.java @@ -78,22 +78,6 @@ public class GATKRunReport { private static final DateFormat DATE_FORMAT = new SimpleDateFormat("yyyy/MM/dd HH.mm.ss"); - /** - * The root file system directory where we keep common report data - */ - private final static File REPORT_DIR = new File("/humgen/gsa-hpprojects/GATK/reports"); - - /** - * The full path to the direct where submitted (and uncharacterized) report files are written - */ - private final static File REPORT_SUBMIT_DIR = new File(REPORT_DIR.getAbsolutePath() + "/submitted"); - - /** - * Full path to the sentinel file that controls whether reports are written out. If this file doesn't - * exist, no long will be written - */ - private final static File REPORT_SENTINEL = new File(REPORT_DIR.getAbsolutePath() + "/ENABLE"); - /** * our log */ @@ -181,8 +165,6 @@ public class GATKRunReport { public enum PhoneHomeOption { /** Disable phone home */ NO_ET, - /** Standard option. Writes to local repository if it can be found, or S3 otherwise */ - STANDARD, /** Forces the report to go to S3 */ AWS, /** Force output to STDOUT. 
For debugging only */ @@ -365,14 +347,9 @@ public class GATKRunReport { switch (type) { case NO_ET: // don't do anything return false; - case STANDARD: case AWS: - if ( type == PhoneHomeOption.STANDARD && repositoryIsOnline() ) { - return postReportToLocalDisk(getLocalReportFullPath()) != null; - } else { - wentToAWS = true; - return postReportToAWSS3() != null; - } + wentToAWS = true; + return postReportToAWSS3() != null; case STDOUT: return postReportToStream(System.out); default: @@ -404,50 +381,6 @@ public class GATKRunReport { } } - /** - * Get the full path as a file where we'll write this report to local disl - * @return a non-null File - */ - @Ensures("result != null") - protected File getLocalReportFullPath() { - return new File(REPORT_SUBMIT_DIR, getReportFileName()); - } - - /** - * Is the local GATKRunReport repository available for writing reports? - * - * @return true if and only if the common run report repository is available and online to receive reports - */ - private boolean repositoryIsOnline() { - return REPORT_SENTINEL.exists(); - } - - - /** - * Main entry point to writing reports to disk. Posts the XML report to the common GATK run report repository. - * If this process fails for any reason, all exceptions are handled and this routine merely prints a warning. - * That is, postReport() is guarenteed not to fail for any reason. 
- * - * @return the path where the file was written, or null if any failure occurred - */ - @Requires("destination != null") - private File postReportToLocalDisk(final File destination) { - try { - final BufferedOutputStream out = new BufferedOutputStream( - new GZIPOutputStream( - new FileOutputStream(destination))); - postReportToStream(out); - out.close(); - logger.debug("Wrote report to " + destination); - return destination; - } catch ( Exception e ) { - // we catch everything, and no matter what eat the error - exceptDuringRunReport("Couldn't read report file", e); - destination.delete(); - return null; - } - } - // --------------------------------------------------------------------------- // // Code for sending reports to s3 diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java index 80da8f8eb..424bd489e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCache.java @@ -43,17 +43,42 @@ import java.util.List; * Time: 11:23 AM */ public class TAROrderedReadCache { - final int maxCapacity; - final Downsampler downsampler; + private final int maxCapacity; + private ArrayList undownsampledCache; + private Downsampler downsampler; + + private static final int UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE = 10000; /** * Create a new empty ReadCache * @param maxCapacity the max capacity of the read cache. 
*/ - public TAROrderedReadCache(int maxCapacity) { + public TAROrderedReadCache( final int maxCapacity ) { if ( maxCapacity < 0 ) throw new IllegalArgumentException("maxCapacity must be >= 0 but got " + maxCapacity); this.maxCapacity = maxCapacity; - this.downsampler = new ReservoirDownsampler(maxCapacity); + + // The one we're not currently using will always be null: + initializeUndownsampledCache(); + this.downsampler = null; + } + + /** + * Moves all reads over to the downsampler, causing it to be used from this point on. Should be called + * when the undownsampledCache fills up and we need to start discarding reads. Since the + * ReservoirDownsampler doesn't preserve relative ordering, pop operations become expensive + * after this point, as they require a O(n log n) sort. + */ + private void activateDownsampler() { + downsampler = new ReservoirDownsampler<>(maxCapacity, false); + downsampler.submit(undownsampledCache); + undownsampledCache = null; // preferable to the O(n) clear() method + } + + /** + * Allocate the undownsampled cache used when we have fewer than maxCapacity items + */ + private void initializeUndownsampledCache() { + undownsampledCache = new ArrayList<>(Math.min(maxCapacity + 1, UNDOWNSAMPLED_CACHE_MAX_INITIAL_SIZE)); } /** @@ -68,18 +93,31 @@ public class TAROrderedReadCache { * Add a single read to this cache. Assumed to be in sorted order w.r.t. the previously added reads * @param read a read to add */ - public void add(final GATKSAMRecord read) { + public void add( final GATKSAMRecord read ) { if ( read == null ) throw new IllegalArgumentException("Read cannot be null"); - downsampler.submit(read); + + if ( downsampler != null ) { + downsampler.submit(read); + } + else { + undownsampledCache.add(read); + + // No more room in the undownsampledCache? Time to start downsampling + if ( undownsampledCache.size() > maxCapacity ) { + activateDownsampler(); + } + } } /** * Add a collection of reads to this cache. 
Assumed to be in sorted order w.r.t. the previously added reads and each other * @param reads a collection of reads to add */ - public void addAll(final List reads) { + public void addAll( final List reads ) { if ( reads == null ) throw new IllegalArgumentException("Reads cannot be null"); - downsampler.submit(reads); + for ( final GATKSAMRecord read : reads ) { + add(read); + } } /** @@ -87,40 +125,44 @@ public class TAROrderedReadCache { * @return a positive integer */ public int size() { - return downsampler.size(); + return downsampler != null ? downsampler.size() : undownsampledCache.size(); } /** * How many reads were discarded since the last call to popCurrentReads - * @return + * + * @return number of items discarded during downsampling since last pop operation */ public int getNumDiscarded() { - return downsampler.getNumberOfDiscardedItems(); + return downsampler != null ? downsampler.getNumberOfDiscardedItems() : 0; } /** * Removes all reads currently in the cache, and returns them in sorted order (w.r.t. alignmentStart) * - * Flushes this cache, so after this call the cache will contain no reads and all downsampling stats will - * be reset. + * Flushes this cache, so after this call the cache will contain no reads, and we'll be in the same + * initial state as the constructor would put us in, with a non-null undownsampledCache and a null + * downsampler. 
* * @return a list of GATKSAMRecords in this cache */ public List popCurrentReads() { - final List maybeUnordered = downsampler.consumeFinalizedItems(); + final List poppedReads; - final List ordered; - if ( downsampler.getNumberOfDiscardedItems() == 0 ) { - // haven't discarded anything, so the reads are ordered properly - ordered = maybeUnordered; - } else { - // we need to sort these damn things: O(n log n) - ordered = new ArrayList(maybeUnordered); - Collections.sort(ordered, new AlignmentStartComparator()); + if ( downsampler == null ) { + poppedReads = undownsampledCache; // avoid making a copy here, since we're going to allocate a new cache + } + else { + // If we triggered the downsampler, we need to sort the reads before returning them, + // since the ReservoirDownsampler is not guaranteed to preserve relative ordering of items. + // After consuming the downsampled items in this call to popCurrentReads(), we switch back + // to using the undownsampledCache until we fill up again. 
+ poppedReads = downsampler.consumeFinalizedItems(); // avoid making a copy here + Collections.sort(poppedReads, new AlignmentStartComparator()); + downsampler = null; } - // reset the downsampler stats so getNumberOfDiscardedItems is 0 - downsampler.reset(); - return ordered; + initializeUndownsampledCache(); + return poppedReads; } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java index 0811e5e70..529b3ef17 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraversalEngine.java @@ -100,15 +100,6 @@ public abstract class TraversalEngine,Provide // by default there's nothing to do } - /** - * Update the cumulative traversal metrics according to the data in this shard - * - * @param shard a non-null shard - */ - public void updateCumulativeMetrics(final Shard shard) { - updateCumulativeMetrics(shard.getReadMetrics()); - } - /** * Update the cumulative traversal metrics according to the data in this shard * diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java index b1e5b907f..b85365366 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java @@ -29,14 +29,12 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; -import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.datasources.providers.*; import 
org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.ActiveRegionTraversalParameters; import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker; -import org.broadinstitute.sting.gatk.walkers.DataSource; import org.broadinstitute.sting.gatk.walkers.Walker; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.SampleUtils; @@ -92,12 +90,26 @@ public final class TraverseActiveRegions extends TraversalEngine walker; - final NanoScheduler nanoScheduler; + final NanoScheduler nanoScheduler; + + /** + * Data to use in the ActiveRegionWalker.map function produced by the NanoScheduler input iterator + */ + private static class MapData { + public ActiveRegion activeRegion; + public RefMetaDataTracker tracker; + + private MapData(ActiveRegion activeRegion, RefMetaDataTracker tracker) { + this.activeRegion = activeRegion; + this.tracker = tracker; + } + } /** * Create a single threaded active region traverser @@ -112,12 +124,12 @@ public final class TraverseActiveRegions extends TraversalEngine(nThreads); - nanoScheduler.setProgressFunction(new NSProgressFunction() { + nanoScheduler.setProgressFunction(new NSProgressFunction() { @Override - public void progress(ActiveRegion lastActiveRegion) { + public void progress(MapData lastActiveRegion) { if ( lastActiveRegion != null ) // note, need to use getStopLocation so we don't give an interval to ProgressMeterDaemon - printProgress(lastActiveRegion.getLocation().getStopLocation()); + printProgress(lastActiveRegion.activeRegion.getLocation().getStopLocation()); } }); } @@ -149,13 +161,6 @@ public final class TraverseActiveRegions extends TraversalEngine extends TraversalEngine extends TraversalEngine walker, - final LocusShardDataProvider dataProvider, - final LocusView locusView) { - if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA ) - return new ManagingReferenceOrderedView( dataProvider ); - else - return 
(RodLocusView)locusView; - } - - // ------------------------------------------------------------------------------------- // // Actual traverse function @@ -254,39 +267,61 @@ public final class TraverseActiveRegions extends TraversalEngine activeRegionIterator = new ActiveRegionIterator(dataProvider); + final Iterator activeRegionIterator = new ActiveRegionIterator(dataProvider); final TraverseActiveRegionMap myMap = new TraverseActiveRegionMap(); final TraverseActiveRegionReduce myReduce = new TraverseActiveRegionReduce(); final T result = nanoScheduler.execute(activeRegionIterator, myMap, sum, myReduce); - updateCumulativeMetrics(dataProvider.getShard()); - return result; } - private class ActiveRegionIterator implements Iterator { + private class ActiveRegionIterator implements Iterator { private final LocusShardDataProvider dataProvider; - private LinkedList readyActiveRegions = new LinkedList(); + private LinkedList readyActiveRegions = new LinkedList<>(); private boolean done = false; private final LocusView locusView; private final LocusReferenceView referenceView; - private final ReferenceOrderedView referenceOrderedDataView; private final GenomeLoc locOfLastReadAtTraversalStart; + private final IntervalReferenceOrderedView referenceOrderedDataView; + private final GenomeLoc currentWindow; + private final boolean processRemainingActiveRegions; public ActiveRegionIterator( final LocusShardDataProvider dataProvider ) { this.dataProvider = dataProvider; locusView = new AllLocusView(dataProvider); referenceView = new LocusReferenceView( walker, dataProvider ); - referenceOrderedDataView = getReferenceOrderedView(walker, dataProvider, locusView); + + // The data shard may carry a number of locations to process (due to being indexed together). 
+ // This value is just the interval we are processing within the entire provider + currentWindow = dataProvider.getLocus(); + final int currentWindowPos = dataProvider.getShard().getGenomeLocs().indexOf(currentWindow); + if ( currentWindowPos == -1 ) throw new IllegalStateException("Data provider " + dataProvider + " didn't have our current window in it " + currentWindow); + processRemainingActiveRegions = currentWindowPos == dataProvider.getShard().getGenomeLocs().size() - 1; + + // the rodSpan covers all of the bases in the activity profile, including all of the bases + // through the current window interval. This is because we may issue a query to get data for an + // active region spanning before the current interval as far back as the start of the current profile, + // if we have pending work to do that finalizes in this interval. + final GenomeLoc rodSpan = activityProfile.getSpan() == null ? currentWindow : activityProfile.getSpan().endpointSpan(currentWindow); + if ( ! dataProvider.getShard().getLocation().containsP(rodSpan) ) throw new IllegalStateException("Rod span " + rodSpan + " isn't contained within the data shard " + dataProvider.getShard().getLocation() + ", meaning we wouldn't get all of the data we need"); + referenceOrderedDataView = new IntervalReferenceOrderedView( dataProvider, rodSpan ); // We keep processing while the next reference location is within the interval locOfLastReadAtTraversalStart = spanOfLastSeenRead(); + + // load in the workQueue the present regions that span the current contig, if it's different from the last one + if ( walkerHasPresetRegions && ( lastRegionProcessed == null || ! 
currentWindow.onSameContig(lastRegionProcessed)) ) { + loadPresetRegionsForContigToWorkQueue(currentWindow.getContig()); + } + + // remember the last region we processed for sanity checking later + lastRegionProcessed = currentWindow; } @Override public void remove() { throw new UnsupportedOperationException("Cannot remove from ActiveRegionIterator"); } @Override - public ActiveRegion next() { + public MapData next() { return readyActiveRegions.pop(); } @Override @@ -328,7 +363,7 @@ public final class TraverseActiveRegions extends TraversalEngine newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false); + final List newActiveRegions = prepActiveRegionsForProcessing(walker, flushProfile, false, referenceOrderedDataView); dataProvider.getShard().getReadMetrics().incrementNumIterations(); @@ -337,7 +372,7 @@ public final class TraverseActiveRegions extends TraversalEngine extends TraversalEngine walker, T sum) { - for ( final ActiveRegion region : prepActiveRegionsForProcessing((ActiveRegionWalker)walker, true, true) ) { - final M x = ((ActiveRegionWalker) walker).map(region, null); - sum = walker.reduce( x, sum ); - } - return sum; - } - // ------------------------------------------------------------------------------------- // // Functions to manage and interact with the live / dead zone @@ -596,7 +627,10 @@ public final class TraverseActiveRegions extends TraversalEngine prepActiveRegionsForProcessing(final ActiveRegionWalker walker, final boolean flushActivityProfile, final boolean forceAllRegionsToBeActive) { + private List prepActiveRegionsForProcessing(final ActiveRegionWalker walker, + final boolean flushActivityProfile, + final boolean forceAllRegionsToBeActive, + final IntervalReferenceOrderedView referenceOrderedDataView) { if ( ! 
walkerHasPresetRegions ) { // We don't have preset regions, so we get our regions from the activity profile final Collection activeRegions = activityProfile.popReadyActiveRegions(getActiveRegionExtension(), getMinRegionSize(), getMaxRegionSize(), flushActivityProfile); @@ -605,13 +639,13 @@ public final class TraverseActiveRegions extends TraversalEngine readyRegions = new LinkedList(); + final LinkedList readyRegions = new LinkedList<>(); while( workQueue.peek() != null ) { final ActiveRegion activeRegion = workQueue.peek(); if ( forceAllRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) { writeActivityProfile(activeRegion.getSupportingStates()); writeActiveRegion(activeRegion); - readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker)); + readyRegions.add(prepActiveRegionForProcessing(workQueue.remove(), walker, referenceOrderedDataView)); } else { break; } @@ -621,8 +655,10 @@ public final class TraverseActiveRegions extends TraversalEngine walker) { - final List stillLive = new LinkedList(); + private MapData prepActiveRegionForProcessing(final ActiveRegion activeRegion, + final ActiveRegionWalker walker, + final IntervalReferenceOrderedView referenceOrderedDataView) { + final List stillLive = new LinkedList<>(); for ( final GATKSAMRecord read : myReads.popCurrentReads() ) { boolean killed = false; final GenomeLoc readLoc = this.engine.getGenomeLocParser().createGenomeLoc( read ); @@ -655,14 +691,21 @@ public final class TraverseActiveRegions extends TraversalEngine { + private class TraverseActiveRegionMap implements NSMapFunction { @Override - public M apply(final ActiveRegion activeRegion) { - if ( DEBUG ) logger.info("Executing walker.map for " + activeRegion + " in thread " + Thread.currentThread().getName()); - return walker.map(activeRegion, null); + public M apply(final MapData mapData) { + if ( DEBUG ) logger.info("Executing walker.map for " + mapData.activeRegion + " in thread " + Thread.currentThread().getName()); 
+ return walker.map(mapData.activeRegion, mapData.tracker); } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java index efa042fdb..17f23de8f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseDuplicates.java @@ -196,7 +196,6 @@ public class TraverseDuplicates extends TraversalEngine extends TraversalEngine, final TraverseResults result = traverse( walker, locusView, referenceView, referenceOrderedDataView, sum ); sum = result.reduceResult; dataProvider.getShard().getReadMetrics().incrementNumIterations(result.numIterations); - updateCumulativeMetrics(dataProvider.getShard()); } // We have a final map call to execute here to clean up the skipped based from the @@ -180,7 +179,7 @@ public class TraverseLociNano extends TraversalEngine, final ReferenceContext refContext = referenceView.getReferenceContext(location); // Iterate forward to get all reference ordered data covering this location - final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location, refContext); + final RefMetaDataTracker tracker = referenceOrderedDataView.getReferenceOrderedDataAtLocus(location); numIterations++; return new MapData(locus, refContext, tracker); diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java index aed88509e..764011a48 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseReadPairs.java @@ -90,7 +90,6 @@ public class TraverseReadPairs extends TraversalEngine extends TraversalEngine, final Iterator aggregatedInputs = aggregateMapData(dataProvider); final T result = 
nanoScheduler.execute(aggregatedInputs, myMap, sum, myReduce); - updateCumulativeMetrics(dataProvider.getShard()); - return result; } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java index 9595b8f42..962f81d0d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/ActiveRegionWalker.java @@ -31,6 +31,7 @@ import org.broad.tribble.Feature; import org.broadinstitute.sting.commandline.*; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.*; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.utils.GenomeLoc; @@ -57,6 +58,7 @@ import java.util.*; @PartitionBy(PartitionType.READ) @ActiveRegionTraversalParameters(extension=50,maxRegion=1500) @ReadFilters({UnmappedReadFilter.class, NotPrimaryAlignmentFilter.class, DuplicateReadFilter.class, FailsVendorQualityCheckFilter.class, MappingQualityUnavailableFilter.class}) +@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000) @RemoveProgramRecords public abstract class ActiveRegionWalker extends Walker { /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java index 788bf11f9..9997723b8 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/LocusWalker.java @@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.walkers; import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import 
org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.DuplicateReadFilter; import org.broadinstitute.sting.gatk.filters.FailsVendorQualityCheckFilter; import org.broadinstitute.sting.gatk.filters.NotPrimaryAlignmentFilter; @@ -44,6 +45,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; @Requires({DataSource.READS,DataSource.REFERENCE}) @PartitionBy(PartitionType.LOCUS) @ReadFilters({UnmappedReadFilter.class,NotPrimaryAlignmentFilter.class,DuplicateReadFilter.class,FailsVendorQualityCheckFilter.class}) +@Downsample(by = DownsampleType.BY_SAMPLE, toCoverage = 1000) @RemoveProgramRecords public abstract class LocusWalker extends Walker { // Do we actually want to operate on the context? diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java index 522414c00..40485596d 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/Walker.java @@ -29,6 +29,7 @@ import net.sf.samtools.SAMSequenceDictionary; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.downsampling.DownsampleType; import org.broadinstitute.sting.gatk.filters.MalformedReadFilter; import org.broadinstitute.sting.gatk.iterators.ReadTransformer; import org.broadinstitute.sting.gatk.samples.Sample; @@ -50,6 +51,7 @@ import java.util.List; */ @ReadFilters(MalformedReadFilter.class) @PartitionBy(PartitionType.NONE) +@Downsample(by = DownsampleType.NONE) @BAQMode(QualityMode = BAQ.QualityMode.OVERWRITE_QUALS, ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT) @BQSRMode(ApplicationTime = ReadTransformer.ApplicationTime.ON_INPUT) @DocumentedGATKFeature(groupName = "Uncategorized", extraDocs = {CommandLineGATK.class}) diff --git 
a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java index 288196d1b..8c068d3e4 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/SnpEff.java @@ -49,7 +49,7 @@ import java.util.regex.Pattern; * *

See http://snpeff.sourceforge.net/ for more information on the SnpEff tool

. * - *

For each variant, this tol chooses one of the effects of highest biological impact from the SnpEff + *

For each variant, this tool chooses one of the effects of highest biological impact from the SnpEff * output file (which must be provided on the command line via --snpEffFile filename.vcf), * and adds annotations on that effect.

* diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java index f2bd6c14c..10ba4ca17 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotator.java @@ -180,9 +180,6 @@ public class VariantAnnotator extends RodWalker implements Ann @Argument(fullName="MendelViolationGenotypeQualityThreshold",shortName="mvq",required=false,doc="The genotype quality threshold in order to annotate mendelian violation ratio") public double minGenotypeQualityP = 0.0; - @Argument(fullName="requireStrictAlleleMatch", shortName="strict", doc="If provided only comp tracks that exactly match both reference and alternate alleles will be counted as concordant", required=false) - protected boolean requireStrictAlleleMatch = false; - private VariantAnnotatorEngine engine; /** @@ -204,7 +201,6 @@ public class VariantAnnotator extends RodWalker implements Ann else engine = new VariantAnnotatorEngine(annotationGroupsToUse, annotationsToUse, annotationsToExclude, this, getToolkit()); engine.initializeExpressions(expressionsToUse); - engine.setRequireStrictAlleleMatch(requireStrictAlleleMatch); // setup the header fields // note that if any of the definitions conflict with our new ones, then we want to overwrite the old ones diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java index 695868bb1..078a36dd9 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantAnnotatorEngine.java @@ -34,26 +34,23 @@ import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import 
org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.*; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.variant.GATKVCFUtils; -import org.broadinstitute.variant.vcf.*; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.variant.variantcontext.*; +import org.broadinstitute.variant.vcf.*; import java.util.*; public class VariantAnnotatorEngine { - private List requestedInfoAnnotations = Collections.emptyList(); private List requestedGenotypeAnnotations = Collections.emptyList(); - private List requestedExpressions = new ArrayList(); + private List requestedExpressions = new ArrayList<>(); - private final HashMap, String> dbAnnotations = new HashMap, String>(); private final AnnotatorCompatible walker; private final GenomeAnalysisEngine toolkit; - private boolean requireStrictAlleleMatch = false; + VariantOverlapAnnotator variantOverlapAnnotator = null; protected static class VAExpression { @@ -85,7 +82,7 @@ public class VariantAnnotatorEngine { requestedInfoAnnotations = AnnotationInterfaceManager.createAllInfoFieldAnnotations(); requestedGenotypeAnnotations = AnnotationInterfaceManager.createAllGenotypeAnnotations(); excludeAnnotations(annotationsToExclude); - initializeDBs(); + initializeDBs(toolkit); } // use this constructor if you want to select specific annotations (and/or interfaces) @@ -93,14 +90,7 @@ public class VariantAnnotatorEngine { this.walker = walker; this.toolkit = toolkit; initializeAnnotations(annotationGroupsToUse, annotationsToUse, annotationsToExclude); - initializeDBs(); - } - - // experimental constructor for active region traversal - public VariantAnnotatorEngine(GenomeAnalysisEngine toolkit) { - this.walker = null; - this.toolkit = toolkit; - 
requestedInfoAnnotations = AnnotationInterfaceManager.createInfoFieldAnnotations(Arrays.asList("ActiveRegionBasedAnnotation"), Collections.emptyList()); + initializeDBs(toolkit); } // select specific expressions to use @@ -138,16 +128,19 @@ public class VariantAnnotatorEngine { requestedGenotypeAnnotations = tempRequestedGenotypeAnnotations; } - private void initializeDBs() { - + private void initializeDBs(final GenomeAnalysisEngine engine) { // check to see whether comp rods were included - final RodBinding dbsnp = walker.getDbsnpRodBinding(); - if ( dbsnp != null && dbsnp.isBound() ) - dbAnnotations.put(dbsnp, VCFConstants.DBSNP_KEY); + RodBinding dbSNPBinding = walker.getDbsnpRodBinding(); + if ( dbSNPBinding != null && ! dbSNPBinding.isBound() ) + dbSNPBinding = null; - final List> comps = walker.getCompRodBindings(); - for ( RodBinding rod : comps ) - dbAnnotations.put(rod, rod.getName()); + final Map, String> overlapBindings = new LinkedHashMap<>(); + for ( final RodBinding b : walker.getCompRodBindings()) + if ( b.isBound() ) overlapBindings.put(b, b.getName()); + if ( dbSNPBinding != null && ! 
overlapBindings.keySet().contains(VCFConstants.DBSNP_KEY) ) + overlapBindings.put(dbSNPBinding, VCFConstants.DBSNP_KEY); // add overlap detection with DBSNP by default + + variantOverlapAnnotator = new VariantOverlapAnnotator(dbSNPBinding, overlapBindings, engine.getGenomeLocParser()); } public void invokeAnnotationInitializationMethods( Set headerLines ) { @@ -161,14 +154,13 @@ public class VariantAnnotatorEngine { } public Set getVCFAnnotationDescriptions() { - Set descriptions = new HashSet(); for ( InfoFieldAnnotation annotation : requestedInfoAnnotations ) descriptions.addAll(annotation.getDescriptions()); for ( GenotypeAnnotation annotation : requestedGenotypeAnnotations ) descriptions.addAll(annotation.getDescriptions()); - for ( String db : dbAnnotations.values() ) { + for ( String db : variantOverlapAnnotator.getOverlapNames() ) { if ( VCFStandardHeaderLines.getInfoLine(db, false) != null ) descriptions.add(VCFStandardHeaderLines.getInfoLine(db)); else @@ -178,10 +170,6 @@ public class VariantAnnotatorEngine { return descriptions; } - public void setRequireStrictAlleleMatch( final boolean requireStrictAlleleMatch ) { - this.requireStrictAlleleMatch = requireStrictAlleleMatch; - } - public VariantContext annotateContext(final RefMetaDataTracker tracker, final ReferenceContext ref, final Map stratifiedContexts, @@ -192,13 +180,10 @@ public class VariantAnnotatorEngine { public VariantContext annotateContext(final RefMetaDataTracker tracker, final ReferenceContext ref, final Map stratifiedContexts, - VariantContext vc, + final VariantContext vc, final Map perReadAlleleLikelihoodMap) { Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); - // annotate db occurrences - vc = annotateDBs(tracker, ref.getLocus(), vc, infoAnnotations); - // annotate expressions where available annotateExpressions(tracker, ref.getLocus(), infoAnnotations); @@ -213,11 +198,16 @@ public class VariantAnnotatorEngine { VariantContextBuilder builder = new 
VariantContextBuilder(vc).attributes(infoAnnotations); // annotate genotypes, creating another new VC in the process - return builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make(); + final VariantContext annotated = builder.genotypes(annotateGenotypes(tracker, ref, stratifiedContexts, vc, perReadAlleleLikelihoodMap)).make(); + + // annotate db occurrences + return annotateDBs(tracker, annotated); } - public VariantContext annotateContext(final Map perReadAlleleLikelihoodMap, VariantContext vc) { - Map infoAnnotations = new LinkedHashMap(vc.getAttributes()); + public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker, + final Map perReadAlleleLikelihoodMap, + final VariantContext vc) { + final Map infoAnnotations = new LinkedHashMap<>(vc.getAttributes()); // go through all the requested info annotationTypes for ( InfoFieldAnnotation annotationType : requestedInfoAnnotations ) { @@ -231,76 +221,26 @@ public class VariantAnnotatorEngine { } // generate a new annotated VC - VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); + final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(infoAnnotations); // annotate genotypes, creating another new VC in the process - return builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); + final VariantContext annotated = builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make(); + + // annotate db occurrences + return annotateDBs(tracker, annotated); } /** * Annotate the ID field and other DBs for the given Variant Context * * @param tracker ref meta data tracker (cannot be null) - * @param loc location of the vc * @param vc variant context to annotate * @return non-null annotated version of vc */ - @Requires({"tracker != null && loc != null && vc != null"}) - @Ensures("result != null") - public VariantContext 
annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc) { - final Map newInfoAnnotations = new HashMap(0); - vc = annotateDBs(tracker, loc, vc, newInfoAnnotations); - - if ( !newInfoAnnotations.isEmpty() ) { - final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(newInfoAnnotations); - vc = builder.make(); - } - - return vc; - } - - /** - * Annotate the ID field and other DBs for the given Variant Context - * - * @param tracker ref meta data tracker (cannot be null) - * @param loc location of the vc - * @param vc variant context to annotate - * @param infoAnnotations info annotation map to populate - * @return non-null annotated version of vc - */ @Requires({"tracker != null && loc != null && vc != null && infoAnnotations != null"}) @Ensures("result != null") - private VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc, final Map infoAnnotations) { - for ( Map.Entry, String> dbSet : dbAnnotations.entrySet() ) { - if ( dbSet.getValue().equals(VCFConstants.DBSNP_KEY) ) { - final String rsID = GATKVCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), loc), vc.getType()); - - // add the ID if appropriate - if ( rsID != null ) { - // put the DB key into the INFO field - infoAnnotations.put(VCFConstants.DBSNP_KEY, true); - - if ( vc.emptyID() ) { - vc = new VariantContextBuilder(vc).id(rsID).make(); - } else if ( walker.alwaysAppendDbsnpId() && vc.getID().indexOf(rsID) == -1 ) { - final String newRsID = vc.getID() + VCFConstants.ID_FIELD_SEPARATOR + rsID; - vc = new VariantContextBuilder(vc).id(newRsID).make(); - } - } - } else { - boolean overlapsComp = false; - for ( VariantContext comp : tracker.getValues(dbSet.getKey(), loc) ) { - if ( !comp.isFiltered() && ( !requireStrictAlleleMatch || comp.getAlleles().equals(vc.getAlleles()) ) ) { - overlapsComp = true; - break; - } - } - if ( overlapsComp ) - infoAnnotations.put(dbSet.getValue(), overlapsComp); - } 
- } - - return vc; + private VariantContext annotateDBs(final RefMetaDataTracker tracker, VariantContext vc) { + return variantOverlapAnnotator.annotateOverlaps(tracker, variantOverlapAnnotator.annotateRsID(tracker, vc)); } private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final Map infoAnnotations) { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java new file mode 100644 index 000000000..07af4bd74 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/annotator/VariantOverlapAnnotator.java @@ -0,0 +1,224 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.annotator; + +import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.variant.vcf.VCFConstants; + +import java.util.*; + +/** + * Annotate the ID field and attribute overlap FLAGs for a VariantContext against a RefMetaDataTracker or a list + * of VariantContexts + */ +public final class VariantOverlapAnnotator { + final RodBinding dbSNPBinding; + final Map, String> overlapBindings; + final GenomeLocParser genomeLocParser; + + /** + * Create a new VariantOverlapAnnotator without overall bindings + * + * @see #VariantOverlapAnnotator(org.broadinstitute.sting.commandline.RodBinding, java.util.Map, org.broadinstitute.sting.utils.GenomeLocParser) + */ + public VariantOverlapAnnotator(RodBinding dbSNPBinding, GenomeLocParser genomeLocParser) { + this(dbSNPBinding, Collections., String>emptyMap(), genomeLocParser); + } + + /** + * Create a new VariantOverlapAnnotator + * + * @param dbSNPBinding the RodBinding to use for updating ID field values, or null if that behavior isn't desired + * @param overlapBindings a map of RodBindings / name to use for overlap annotation. Each binding will be used to + * add name => true for variants that overlap with variants found to a + * RefMetaDataTracker at each location. 
Can be empty but not null + * @param genomeLocParser the genome loc parser we'll use to create GenomeLocs for VariantContexts + */ + public VariantOverlapAnnotator(RodBinding dbSNPBinding, Map, String> overlapBindings, GenomeLocParser genomeLocParser) { + if ( overlapBindings == null ) throw new IllegalArgumentException("overlapBindings cannot be null"); + if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null"); + + this.dbSNPBinding = dbSNPBinding; + this.overlapBindings = overlapBindings; + this.genomeLocParser = genomeLocParser; + } + + /** + * Update rsID in vcToAnnotate with rsIDs from dbSNPBinding fetched from tracker + * @see #annotateOverlap(java.util.List, String, org.broadinstitute.variant.variantcontext.VariantContext) + * + * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate + * for VariantContexts bound to dbSNPBinding that start at vcToAnnotate + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated rsID value + */ + public VariantContext annotateRsID(final RefMetaDataTracker tracker, final VariantContext vcToAnnotate) { + if ( dbSNPBinding != null ) { + final GenomeLoc loc = getLoc(vcToAnnotate); + return annotateRsID(tracker.getValues(dbSNPBinding, loc), vcToAnnotate); + } else { + return vcToAnnotate; + } + } + + /** + * Update rsID of vcToAnnotate with rsID match found in vcsAtLoc, if one exists + * + * @param vcsAtLoc a list of variant contexts starting at this location to use as sources for rsID values + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated rsID value + */ + public VariantContext annotateRsID(final List vcsAtLoc, final VariantContext vcToAnnotate ) { + final String rsID = getRsID(vcsAtLoc, vcToAnnotate); + + // add the ID if appropriate + if ( rsID != null ) { + final VariantContextBuilder vcb = new 
VariantContextBuilder(vcToAnnotate); + + if ( ! vcToAnnotate.hasID() ) { + return vcb.id(rsID).make(); + } else if ( ! vcToAnnotate.getID().contains(rsID) ) { + return vcb.id(vcToAnnotate.getID() + VCFConstants.ID_FIELD_SEPARATOR + rsID).make(); + } // falling through to return VC lower down + } + + // nothing to do, just return vc + return vcToAnnotate; + } + + private GenomeLoc getLoc(final VariantContext vc) { + return genomeLocParser.createGenomeLoc(vc); + } + + /** + * Add overlap attributes to vcToAnnotate against all overlapBindings in tracker + * + * @see #annotateOverlap(java.util.List, String, org.broadinstitute.variant.variantcontext.VariantContext) + * for more information + * + * @param tracker non-null tracker, which we will use to update the rsID of vcToAnnotate + * for VariantContexts bound to dbSNPBinding that start at vcToAnnotate + * @param vcToAnnotate a variant context to annotate + * @return a VariantContext (may be == to vcToAnnotate) with updated overlaps update fields value + */ + public VariantContext annotateOverlaps(final RefMetaDataTracker tracker, final VariantContext vcToAnnotate) { + if ( overlapBindings.isEmpty() ) return vcToAnnotate; + + VariantContext annotated = vcToAnnotate; + final GenomeLoc loc = getLoc(vcToAnnotate); + for ( final Map.Entry, String> overlapBinding : overlapBindings.entrySet() ) { + annotated = annotateOverlap(tracker.getValues(overlapBinding.getKey(), loc), overlapBinding.getValue(), vcToAnnotate); + } + + return annotated; + } + + /** + * Add overlaps flag attributes to vcToAnnotate binding overlapTestVCs.getSource() => true if + * an overlapping variant context can be found in overlapTestVCs with vcToAnnotate + * + * Overlaps here means that the reference alleles are the same and at least one alt + * allele in vcToAnnotate is equals to one of the alt alleles in overlapTestVCs + * + * @param overlapTestVCs a non-null list of potential overlaps that start at vcToAnnotate + * @param attributeKey the key to 
set to true in the attribute map for vcToAnnotate if it overlaps + * @param vcToAnnotate a non-null VariantContext to annotate + * @return + */ + public VariantContext annotateOverlap(final List overlapTestVCs, final String attributeKey, VariantContext vcToAnnotate) { + if ( overlapBindings.isEmpty() ) return vcToAnnotate; + + final boolean overlaps = overlaps(overlapTestVCs, vcToAnnotate); + if ( overlaps ) { + return new VariantContextBuilder(vcToAnnotate).attribute(attributeKey, true).make(); + } else { + return vcToAnnotate; + } + } + + /** + * Returns the ID field of the first VariantContext in rsIDSourceVCs that has the same reference allele + * as vcToAnnotate and all of the alternative alleles in vcToAnnotate. + * + * Doesn't require vcToAnnotate to be a complete match, so + * + * A/C/G in VC in rsIDSourceVCs + * + * would match the a VC with A/C but not A/T. Also we don't require all alleles to match + * so we would also match A/C/T to A/C/G. + * + * Will only match rsIDSourceVCs that aren't failing filters. + * + * @param rsIDSourceVCs a non-null list of potential overlaps that start at vcToAnnotate + * @param vcToAnnotate a non-null VariantContext to annotate + * @return a String to use for the rsID from rsIDSourceVCs if one matches, or null if none matches + */ + private String getRsID(final List rsIDSourceVCs, final VariantContext vcToAnnotate) { + if ( rsIDSourceVCs == null ) throw new IllegalArgumentException("rsIDSourceVCs cannot be null"); + if ( vcToAnnotate == null ) throw new IllegalArgumentException("vcToAnnotate cannot be null"); + + for ( final VariantContext vcComp : rsIDSourceVCs ) { + if ( vcComp.isFiltered() ) continue; // don't process any failed VCs + + if ( ! 
vcComp.getChr().equals(vcToAnnotate.getChr()) || vcComp.getStart() != vcToAnnotate.getStart() ) + throw new IllegalArgumentException("source rsID VariantContext " + vcComp + " doesn't start at the same position as vcToAnnotate " + vcToAnnotate); + + if ( vcToAnnotate.getReference().equals(vcComp.getReference()) ) { + for ( final Allele allele : vcToAnnotate.getAlternateAlleles() ) { + if ( vcComp.getAlternateAlleles().contains(allele) ) + return vcComp.getID(); + } + } + } + + return null; + } + + /** + * Does vcToAnnotate overlap with any of the records in potentialOverlaps? + * + * @param potentialOverlaps a non-null list of potential overlaps that start at vcToAnnotate + * @param vcToAnnotate a non-null VariantContext to annotate + * @return true if vcToAnnotate overlaps (position and all alt alleles) with some variant in potentialOverlaps + */ + private boolean overlaps(final List potentialOverlaps, final VariantContext vcToAnnotate) { + return getRsID(potentialOverlaps, vcToAnnotate) != null; + } + + /** + * Get the collection of the RodBinding names for those being used for overlap detection + * @return a non-null collection of Strings + */ + public Collection getOverlapNames() { + return overlapBindings.values(); + } +} diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java index 15bd79586..7d5ad9b8a 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/beagle/BeagleOutputToVCF.java @@ -129,6 +129,9 @@ public class BeagleOutputToVCF extends RodWalker { private final double MIN_PROB_ERROR = 0.000001; private final double MAX_GENOTYPE_QUALITY = -6.0; + private final static String BEAGLE_MONO_FILTER_STRING = "BGL_SET_TO_MONOMORPHIC"; + private final static String ORIGINAL_ALT_ALLELE_INFO_KEY = "OriginalAltAllele"; + public void 
initialize() { // setup the header fields @@ -138,10 +141,8 @@ public class BeagleOutputToVCF extends RodWalker { hInfo.add(new VCFFormatHeaderLine("OG",1, VCFHeaderLineType.String, "Original Genotype input to Beagle")); hInfo.add(new VCFInfoHeaderLine("R2", 1, VCFHeaderLineType.Float, "r2 Value reported by Beagle on each site")); hInfo.add(new VCFInfoHeaderLine("NumGenotypesChanged", 1, VCFHeaderLineType.Integer, "The number of genotypes changed by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_A", "This 'A' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_C", "This 'C' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_G", "This 'G' site was set to monomorphic by Beagle")); - hInfo.add(new VCFFilterHeaderLine("BGL_RM_WAS_T", "This 'T' site was set to monomorphic by Beagle")); + hInfo.add(new VCFInfoHeaderLine(ORIGINAL_ALT_ALLELE_INFO_KEY, 1, VCFHeaderLineType.String, "The original alt allele for a site set to monomorphic by Beagle")); + hInfo.add(new VCFFilterHeaderLine(BEAGLE_MONO_FILTER_STRING, "This site was set to monomorphic by Beagle")); if ( comp.isBound() ) { hInfo.add(new VCFInfoHeaderLine("ACH", 1, VCFHeaderLineType.Integer, "Allele Count from Comparison ROD at this site")); @@ -335,9 +336,8 @@ public class BeagleOutputToVCF extends RodWalker { final VariantContextBuilder builder = new VariantContextBuilder(vc_input).source("outputvcf").genotypes(genotypes); if ( ! ( beagleVarCounts > 0 || DONT_FILTER_MONOMORPHIC_SITES ) ) { - Set removedFilters = vc_input.filtersWereApplied() ? 
new HashSet(vc_input.getFilters()) : new HashSet(1); - removedFilters.add(String.format("BGL_RM_WAS_%s",vc_input.getAlternateAllele(0))); - builder.alleles(new HashSet(Arrays.asList(vc_input.getReference()))).filters(removedFilters); + builder.attribute(ORIGINAL_ALT_ALLELE_INFO_KEY, vc_input.getAlternateAllele(0)); + builder.alleles(Collections.singleton(vc_input.getReference())).filter(BEAGLE_MONO_FILTER_STRING); } // re-compute chromosome counts diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java index 825fcac90..45beea28f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/qc/CountReads.java @@ -66,11 +66,16 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord; */ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_QC, extraDocs = {CommandLineGATK.class} ) @Requires({DataSource.READS, DataSource.REFERENCE}) -public class CountReads extends ReadWalker implements NanoSchedulable { +public class CountReads extends ReadWalker implements NanoSchedulable { public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker tracker) { return 1; } - @Override public Integer reduceInit() { return 0; } - @Override public Integer reduce(Integer value, Integer sum) { return value + sum; } + @Override public Long reduceInit() { return 0L; } + + public Long reduce(Integer value, Long sum) { return (long) value + sum; } + + public void onTraversalDone(Long result) { + logger.info("CountReads counted " + result + " reads in the traversal"); + } } diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java deleted file mode 100644 index 1141a9164..000000000 --- 
a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/DownsampleReadsQC.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) 2012 The Broad Institute - * - * Permission is hereby granted, free of charge, to any person - * obtaining a copy of this software and associated documentation - * files (the "Software"), to deal in the Software without - * restriction, including without limitation the rights to use, - * copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following - * conditions: - * - * The above copyright notice and this permission notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR - * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- */ - -package org.broadinstitute.sting.gatk.walkers.readutils; - -import org.broadinstitute.sting.commandline.Argument; -import org.broadinstitute.sting.commandline.Output; -import org.broadinstitute.sting.gatk.contexts.ReferenceContext; -import org.broadinstitute.sting.gatk.downsampling.DownsamplingUtils; -import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; -import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; -import org.broadinstitute.sting.gatk.walkers.DataSource; -import org.broadinstitute.sting.gatk.walkers.NanoSchedulable; -import org.broadinstitute.sting.gatk.walkers.ReadWalker; -import org.broadinstitute.sting.gatk.walkers.Requires; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedList; - -/** - */ -@Requires({DataSource.READS, DataSource.REFERENCE}) -public class DownsampleReadsQC extends ReadWalker> implements NanoSchedulable { - @Output(doc="Write output to this BAM filename instead of STDOUT", required = true) - StingSAMFileWriter out; - - @Argument(fullName = "minReadsPerAlignmentStart", shortName = "minReadsPerAlignmentStart", doc ="", required = false) - private int minReadsPerAlignmentStart = 5; - - @Argument(fullName = "downsampleTo", shortName = "downsampleTo", doc ="", required = false) - private int downsampleTo = 1000; - - /** - * The initialize function. - */ - public void initialize() { -// final boolean preSorted = true; -// if (getToolkit() != null && getToolkit().getArguments().BQSR_RECAL_FILE != null && !NO_PG_TAG ) { -// Utils.setupWriter(out, getToolkit(), getToolkit().getSAMFileHeader(), !preSorted, keep_records, this, PROGRAM_RECORD_NAME); -// } - } - - /** - * The reads map function. 
- * - * @param ref the reference bases that correspond to our read, if a reference was provided - * @param readIn the read itself, as a GATKSAMRecord - * @return the read itself - */ - public GATKSAMRecord map( ReferenceContext ref, GATKSAMRecord readIn, RefMetaDataTracker metaDataTracker ) { - return readIn; - } - - /** - * reduceInit is called once before any calls to the map function. We use it here to setup the output - * bam file, if it was specified on the command line - * - * @return SAMFileWriter, set to the BAM output file if the command line option was set, null otherwise - */ - public Collection reduceInit() { - return new LinkedList(); - } - - /** - * given a read and a output location, reduce by emitting the read - * - * @param read the read itself - * @param output the output source - * @return the SAMFileWriter, so that the next reduce can emit to the same source - */ - public Collection reduce( GATKSAMRecord read, Collection output ) { - output.add(read); - return output; - } - - @Override - public void onTraversalDone(Collection result) { - for ( final GATKSAMRecord read : DownsamplingUtils.levelCoverageByPosition(new ArrayList(result), downsampleTo, minReadsPerAlignmentStart) ) - out.addAlignment(read); - } -} diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java index a28523369..c7ed0bffd 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/readutils/PrintReads.java @@ -96,7 +96,7 @@ import java.util.*; * -T PrintReads \ * -o output.bam \ * -I input.bam \ - * -ds 0.25 + * -dfrac 0.25 * * */ @@ -124,12 +124,6 @@ public class PrintReads extends ReadWalker impleme @Argument(fullName = "number", shortName = "n", doc="Print the first n reads from the file, discarding the rest", required = false) int nReadsToPrint = -1; - /** - * 
Downsamples the bam file by the given ratio, printing only approximately the given percentage of reads. The downsampling is balanced (over the entire coverage) - */ - @Argument(fullName = "downsample_coverage", shortName = "ds", doc="Downsample BAM to desired coverage", required = false) - public double downsampleRatio = 1.0; - /** * Only reads from samples listed in the provided file(s) will be included in the output. */ @@ -237,8 +231,7 @@ public class PrintReads extends ReadWalker impleme nReadsToPrint--; // n > 0 means there are still reads to be printed. } - // if downsample option is turned off (= 1) then don't waste time getting the next random number. - return (downsampleRatio == 1 || random.nextDouble() < downsampleRatio); + return true; } /** diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java index 10397d718..da8b20c66 100755 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/GenotypeConcordance.java @@ -67,8 +67,58 @@ import java.util.*; * *

Output

* Genotype Concordance writes a GATK report to the specified file (via -o) , consisting of multiple tables of counts - * and proportions. These tables may be optionally moltenized via the -moltenize argument. + * and proportions. These tables may be optionally moltenized via the -moltenize argument. That is, the standard table * + * Sample NO_CALL_HOM_REF NO_CALL_HET NO_CALL_HOM_VAR (...) + * NA12878 0.003 0.001 0.000 (...) + * NA12891 0.005 0.000 0.000 (...) + * + * would instead be displayed + * + * NA12878 NO_CALL_HOM_REF 0.003 + * NA12878 NO_CALL_HET 0.001 + * NA12878 NO_CALL_HOM_VAR 0.000 + * NA12891 NO_CALL_HOM_REF 0.005 + * NA12891 NO_CALL_HET 0.000 + * NA12891 NO_CALL_HOM_VAR 0.000 + * (...) + * + * + * These tables are constructed on a per-sample basis, and include counts of eval vs comp genotype states, and the + * number of times the alternate alleles between the eval and comp sample did not match up. + * + * In addition, Genotype Concordance produces site-level allelic concordance. For strictly bi-allelic VCFs, + * only the ALLELES_MATCH, EVAL_ONLY, TRUTH_ONLY fields will be populated, but where multi-allelic sites are involved + * counts for EVAL_SUBSET_TRUTH and EVAL_SUPERSET_TRUTH will be generated. + * + * For example, in the following situation + * eval: ref - A alt - C + * comp: ref - A alt - C,T + * then the site is tabulated as EVAL_SUBSET_TRUTH. Were the situation reversed, it would be EVAL_SUPERSET_TRUTH. + * However, in the case where eval has both C and T alternate alleles, both must be observed in the genotypes + * (that is, there must be at least one of (0/1,1/1) and at least one of (0/2,1/2,2/2) in the genotype field). If + * one of the alleles has no observations in the genotype fields of the eval, the site-level concordance is + * tabulated as though that allele were not present in the record. + * + *

Monomorphic Records

+ * A site which has an alternate allele, but which is monomorphic in samples, is treated as not having been + * discovered, and will be recorded in the TRUTH_ONLY column (if a record exists in the comp VCF), or not at all + * (if no record exists in the comp VCF). + * + * That is, in the situation + * eval: ref - A alt - C genotypes - 0/0 0/0 0/0 ... 0/0 + * comp: ref - A alt - C ... 0/0 0/0 ... + * is equivalent to + * eval: ref - A alt - . genotypes - 0/0 0/0 0/0 ... 0/0 + * comp: ref - A alt - C ... 0/0 0/0 ... + * + * When a record is present in the comp VCF the *genotypes* for the monomorphic site will still be used to evaluate + * per-sample genotype concordance counts. + * + *

Filtered Records

+ * Filtered records are treated as though they were not present in the VCF, unless -ignoreSiteFilters is provided, + * in which case all records are used. There is currently no way to assess concordance metrics on filtered sites + * exclusively. SelectVariants can be used to extract filtered sites, and VariantFiltration used to un-filter them. */ @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARMANIP, extraDocs = {CommandLineGATK.class} ) public class GenotypeConcordance extends RodWalker>,ConcordanceMetrics> { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java index 8d16e6ca2..c414b443e 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToBinaryPed.java @@ -176,6 +176,7 @@ public class VariantsToBinaryPed extends RodWalker { // Cut down on memory. 
try { File temp = File.createTempFile("VariantsToBPed_"+sample, ".tmp"); + temp.deleteOnExit(); printMap.put(sample,new PrintStream(temp)); tempFiles.put(sample,temp); } catch (IOException e) { diff --git a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java index 60809134a..dbb68961f 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java +++ b/public/java/src/org/broadinstitute/sting/gatk/walkers/variantutils/VariantsToVCF.java @@ -39,6 +39,7 @@ import org.broadinstitute.sting.gatk.refdata.utils.GATKFeature; import org.broadinstitute.sting.gatk.walkers.Reference; import org.broadinstitute.sting.gatk.walkers.RodWalker; import org.broadinstitute.sting.gatk.walkers.Window; +import org.broadinstitute.sting.gatk.walkers.annotator.VariantOverlapAnnotator; import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.SampleUtils; @@ -112,24 +113,21 @@ public class VariantsToVCF extends RodWalker { // for dealing with indels in hapmap CloseableIterator dbsnpIterator = null; + VariantOverlapAnnotator variantOverlapAnnotator = null; public void initialize() { vcfwriter = VariantContextWriterFactory.sortOnTheFly(baseWriter, 40, false); + variantOverlapAnnotator = new VariantOverlapAnnotator(dbsnp.dbsnp, getToolkit().getGenomeLocParser()); } public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { if ( tracker == null || !BaseUtils.isRegularBase(ref.getBase()) ) return 0; - String rsID = dbsnp == null ? 
null : GATKVCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbsnp.dbsnp, context.getLocation()), VariantContext.Type.SNP); - Collection contexts = getVariantContexts(tracker, ref); for ( VariantContext vc : contexts ) { VariantContextBuilder builder = new VariantContextBuilder(vc); - if ( rsID != null && vc.emptyID() ) { - builder.id(rsID).make(); - } // set the appropriate sample name if necessary if ( sampleName != null && vc.hasGenotypes() && vc.hasGenotype(variants.getName()) ) { @@ -137,7 +135,8 @@ public class VariantsToVCF extends RodWalker { builder.genotypes(g); } - writeRecord(builder.make(), tracker, ref.getLocus()); + final VariantContext withID = variantOverlapAnnotator.annotateRsID(tracker, builder.make()); + writeRecord(withID, tracker, ref.getLocus()); } return 1; diff --git a/public/java/src/org/broadinstitute/sting/tools/CatVariants.java b/public/java/src/org/broadinstitute/sting/tools/CatVariants.java index ad77b2548..b59786d15 100644 --- a/public/java/src/org/broadinstitute/sting/tools/CatVariants.java +++ b/public/java/src/org/broadinstitute/sting/tools/CatVariants.java @@ -144,15 +144,13 @@ public class CatVariants extends CommandLineProgram { BasicConfigurator.configure(); logger.setLevel(Level.INFO); - if ( ! refFile.getName().endsWith(".fasta")) { - throw new UserException("Reference file "+refFile+"name must end with .fasta"); + final ReferenceSequenceFile ref; + try { + ref = ReferenceSequenceFileFactory.getReferenceSequenceFile(refFile); + } catch ( Exception e ) { + throw new UserException("Couldn't load provided reference sequence file " + refFile, e); } - if ( ! 
refFile.exists() ) { - throw new UserException(String.format("Reference file %s does not exist", refFile.getAbsolutePath())); - } - - // Comparator>> comparator = new PositionComparator(); Comparator> positionComparator = new PositionComparator(); @@ -203,8 +201,6 @@ public class CatVariants extends CommandLineProgram { if (!(outputFile.getName().endsWith(".vcf") || outputFile.getName().endsWith(".VCF"))){ throw new UserException(String.format("Output file %s should be .vcf", outputFile)); } - ReferenceSequenceFile ref = ReferenceSequenceFileFactory.getReferenceSequenceFile(refFile); - FileOutputStream outputStream = new FileOutputStream(outputFile); EnumSet options = EnumSet.of(Options.INDEX_ON_THE_FLY); diff --git a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java index 38c131bc6..07aff5983 100644 --- a/public/java/src/org/broadinstitute/sting/utils/MathUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/MathUtils.java @@ -29,8 +29,8 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; -import org.broadinstitute.sting.utils.exceptions.UserException; +import javax.annotation.Nullable; import java.math.BigDecimal; import java.util.*; @@ -54,15 +54,19 @@ public class MathUtils { private static final double JACOBIAN_LOG_TABLE_INV_STEP = 1.0 / JACOBIAN_LOG_TABLE_STEP; private static final double MAX_JACOBIAN_TOLERANCE = 8.0; private static final int JACOBIAN_LOG_TABLE_SIZE = (int) (MAX_JACOBIAN_TOLERANCE / JACOBIAN_LOG_TABLE_STEP) + 1; - private static final int MAXN = 70000; + private static final int MAXN = 70_000; private static final int LOG10_CACHE_SIZE = 4 * MAXN; // we need to be able to go up to 2*(2N) when calculating some of the coefficients /** * The smallest log10 value we'll emit from normalizeFromLog10 
and other functions * where the real-space value is 0.0. */ - public final static double LOG10_P_OF_ZERO = -1000000.0; - public final static double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); + public static final double LOG10_P_OF_ZERO = -1000000.0; + public static final double FAIR_BINOMIAL_PROB_LOG10_0_5 = Math.log10(0.5); + public static final double LOG_ONE_HALF = -Math.log10(2.0); + public static final double LOG_ONE_THIRD = -Math.log10(3.0); + private static final double NATURAL_LOG_OF_TEN = Math.log(10.0); + private static final double SQUARE_ROOT_OF_TWO_TIMES_PI = Math.sqrt(2.0 * Math.PI); static { log10Cache = new double[LOG10_CACHE_SIZE]; @@ -203,15 +207,16 @@ public class MathUtils { } /** - * Converts a real space array of probabilities into a log10 array + * Converts a real space array of numbers (typically probabilities) into a log10 array * * @param prRealSpace * @return */ public static double[] toLog10(final double[] prRealSpace) { double[] log10s = new double[prRealSpace.length]; - for (int i = 0; i < prRealSpace.length; i++) + for (int i = 0; i < prRealSpace.length; i++) { log10s[i] = Math.log10(prRealSpace[i]); + } return log10s; } @@ -227,6 +232,9 @@ public class MathUtils { return maxValue; for (int i = start; i < finish; i++) { + if ( Double.isNaN(log10p[i]) || log10p[i] == Double.POSITIVE_INFINITY ) { + throw new IllegalArgumentException("log10p: Values must be non-infinite and non-NAN"); + } sum += Math.pow(10.0, log10p[i] - maxValue); } @@ -235,9 +243,6 @@ public class MathUtils { public static double sumLog10(final double[] log10values) { return Math.pow(10.0, log10sumLog10(log10values)); - // double s = 0.0; - // for ( double v : log10values) s += Math.pow(10.0, v); - // return s; } public static double log10sumLog10(final double[] log10values) { @@ -301,12 +306,50 @@ public class MathUtils { return 1; } - public static double NormalDistribution(final double mean, final double sd, final double x) { + /** + * Calculate f(x) = 
Normal(x | mu = mean, sigma = sd) + * @param mean the desired mean of the Normal distribution + * @param sd the desired standard deviation of the Normal distribution + * @param x the value to evaluate + * @return a well-formed double + */ + public static double normalDistribution(final double mean, final double sd, final double x) { + if( sd < 0 ) + throw new IllegalArgumentException("sd: Standard deviation of normal must be >0"); + if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) ) + throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)"); double a = 1.0 / (sd * Math.sqrt(2.0 * Math.PI)); double b = Math.exp(-1.0 * (Math.pow(x - mean, 2.0) / (2.0 * sd * sd))); return a * b; } + /** + * Calculate f(x) = log10 ( Normal(x | mu = mean, sigma = sd) ) + * @param mean the desired mean of the Normal distribution + * @param sd the desired standard deviation of the Normal distribution + * @param x the value to evaluate + * @return a well-formed double + */ + + public static double normalDistributionLog10(final double mean, final double sd, final double x) { + if( sd < 0 ) + throw new IllegalArgumentException("sd: Standard deviation of normal must be >0"); + if ( ! wellFormedDouble(mean) || ! wellFormedDouble(sd) || ! wellFormedDouble(x) ) + throw new IllegalArgumentException("mean, sd, or, x : Normal parameters must be well formatted (non-INF, non-NAN)"); + final double a = -1.0 * Math.log10(sd * SQUARE_ROOT_OF_TWO_TIMES_PI); + final double b = -1.0 * (square(x - mean) / (2.0 * square(sd))) / NATURAL_LOG_OF_TEN; + return a + b; + } + + /** + * Calculate f(x) = x^2 + * @param x the value to square + * @return x * x + */ + public static double square(final double x) { + return x * x; + } + /** * Calculates the log10 of the binomial coefficient. Designed to prevent * overflows even with very large numbers. 
@@ -323,6 +366,13 @@ public class MathUtils { * @see #binomialCoefficient(int, int) with log10 applied to result */ public static double log10BinomialCoefficient(final int n, final int k) { + if ( n < 0 ) { + throw new IllegalArgumentException("n: Must have non-negative number of trials"); + } + if ( k > n || k < 0 ) { + throw new IllegalArgumentException("k: Must have non-negative number of successes, and no more successes than number of trials"); + } + return log10Factorial(n) - log10Factorial(k) - log10Factorial(n - k); } @@ -346,6 +396,8 @@ public class MathUtils { * @see #binomialProbability(int, int, double) with log10 applied to result */ public static double log10BinomialProbability(final int n, final int k, final double log10p) { + if ( log10p > 1e-18 ) + throw new IllegalArgumentException("log10p: Log-probability must be 0 or less"); double log10OneMinusP = Math.log10(1 - Math.pow(10, log10p)); return log10BinomialCoefficient(n, k) + log10p * k + log10OneMinusP * (n - k); } @@ -364,9 +416,35 @@ public class MathUtils { return log10BinomialCoefficient(n, k) + (n * FAIR_BINOMIAL_PROB_LOG10_0_5); } + /** A memoization container for {@link #binomialCumulativeProbability(int, int, int)}. Synchronized to accomodate multithreading. */ + private static final Map BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE = + Collections.synchronizedMap(new LRUCache(10_000)); + + /** + * Primitive integer-triplet bijection into long. Returns null when the bijection function fails (in lieu of an exception), which will + * happen when: any value is negative or larger than a short. This method is optimized for speed; it is not intended to serve as a + * utility function. 
+ */ + @Nullable + static Long fastGenerateUniqueHashFromThreeIntegers(final int one, final int two, final int three) { + if (one < 0 || two < 0 || three < 0 || Short.MAX_VALUE < one || Short.MAX_VALUE < two || Short.MAX_VALUE < three) { + return null; + } else { + long result = 0; + result += (short) one; + result <<= 16; + result += (short) two; + result <<= 16; + result += (short) three; + return result; + } + } + /** * Performs the cumulative sum of binomial probabilities, where the probability calculation is done in log space. * Assumes that the probability of a successful hit is fair (i.e. 0.5). + * + * This pure function is memoized because of its expensive BigDecimal calculations. * * @param n number of attempts for the number of hits * @param k_start start (inclusive) of the cumulant sum (over hits) @@ -377,23 +455,41 @@ public class MathUtils { if ( k_end > n ) throw new IllegalArgumentException(String.format("Value for k_end (%d) is greater than n (%d)", k_end, n)); - double cumProb = 0.0; - double prevProb; - BigDecimal probCache = BigDecimal.ZERO; - - for (int hits = k_start; hits <= k_end; hits++) { - prevProb = cumProb; - final double probability = binomialProbability(n, hits); - cumProb += probability; - if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision - probCache = probCache.add(new BigDecimal(prevProb)); - cumProb = 0.0; - hits--; // repeat loop - // prevProb changes at start of loop - } + // Fetch cached value, if applicable. 
+ final Long memoizationKey = fastGenerateUniqueHashFromThreeIntegers(n, k_start, k_end); + final Double memoizationCacheResult; + if (memoizationKey != null) { + memoizationCacheResult = BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.get(memoizationKey); + } else { + memoizationCacheResult = null; } - return probCache.add(new BigDecimal(cumProb)).doubleValue(); + final double result; + if (memoizationCacheResult != null) { + result = memoizationCacheResult; + } else { + double cumProb = 0.0; + double prevProb; + BigDecimal probCache = BigDecimal.ZERO; + + for (int hits = k_start; hits <= k_end; hits++) { + prevProb = cumProb; + final double probability = binomialProbability(n, hits); + cumProb += probability; + if (probability > 0 && cumProb - prevProb < probability / 2) { // loss of precision + probCache = probCache.add(new BigDecimal(prevProb)); + cumProb = 0.0; + hits--; // repeat loop + // prevProb changes at start of loop + } + } + + result = probCache.add(new BigDecimal(cumProb)).doubleValue(); + if (memoizationKey != null) { + BINOMIAL_CUMULATIVE_PROBABILITY_MEMOIZATION_CACHE.put(memoizationKey, result); + } + } + return result; } /** @@ -405,10 +501,20 @@ public class MathUtils { * @return */ public static double log10MultinomialCoefficient(final int n, final int[] k) { + if ( n < 0 ) + throw new IllegalArgumentException("n: Must have non-negative number of trials"); double denominator = 0.0; + int sum = 0; for (int x : k) { + if ( x < 0 ) + throw new IllegalArgumentException("x element of k: Must have non-negative observations of group"); + if ( x > n ) + throw new IllegalArgumentException("x element of k, n: Group observations must be bounded by k"); denominator += log10Factorial(x); + sum += x; } + if ( sum != n ) + throw new IllegalArgumentException("k and n: Sum of observations in multinomial must sum to total number of trials"); return log10Factorial(n) - denominator; } @@ -423,9 +529,11 @@ public class MathUtils { */ public static double 
log10MultinomialProbability(final int n, final int[] k, final double[] log10p) { if (log10p.length != k.length) - throw new UserException.BadArgumentValue("p and k", "Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length); + throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + log10p.length + ", " + k.length); double log10Prod = 0.0; for (int i = 0; i < log10p.length; i++) { + if ( log10p[i] > 1e-18 ) + throw new IllegalArgumentException("log10p: Log-probability must be <= 0"); log10Prod += log10p[i] * k[i]; } return log10MultinomialCoefficient(n, k) + log10Prod; @@ -468,7 +576,7 @@ public class MathUtils { */ public static double multinomialProbability(final int[] k, final double[] p) { if (p.length != k.length) - throw new UserException.BadArgumentValue("p and k", "Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length); + throw new IllegalArgumentException("p and k: Array of log10 probabilities must have the same size as the array of number of sucesses: " + p.length + ", " + k.length); int n = 0; double[] log10P = new double[p.length]; @@ -791,11 +899,8 @@ public class MathUtils { break; sum += x; i++; - //System.out.printf(" %d/%d", sum, i); } - //System.out.printf("Sum = %d, n = %d, maxI = %d, avg = %f%n", sum, i, maxI, (1.0 * sum) / i); - return (1.0 * sum) / i; } @@ -1291,7 +1396,7 @@ public class MathUtils { } /** - * Compute in a numerical correct way the quanity log10(1-x) + * Compute in a numerical correct way the quantity log10(1-x) * * Uses the approximation log10(1-x) = log10(1/x - 1) + log10(x) to avoid very quick underflow * in 1-x when x is very small diff --git a/public/java/src/org/broadinstitute/sting/utils/Utils.java b/public/java/src/org/broadinstitute/sting/utils/Utils.java index 73a538ee5..75bd6a3d1 100644 --- 
a/public/java/src/org/broadinstitute/sting/utils/Utils.java +++ b/public/java/src/org/broadinstitute/sting/utils/Utils.java @@ -683,6 +683,36 @@ public class Utils { return denom == 0 ? "NA" : String.format("%.2f", num / (1.0 * denom)); } + /** + * Adds element from an array into a collection. + * + * In the event of exception being throw due to some element, dest might have been modified by + * the successful addition of element before that one. + * + * @param dest the destination collection which cannot be null and should be able to accept + * the input elements. + * @param elements the element to add to dest + * @param collection type element. + * @throws UnsupportedOperationException if the add operation + * is not supported by dest. + * @throws ClassCastException if the class of any of the elements + * prevents it from being added to dest. + * @throws NullPointerException if any of the elements is null and dest + * does not permit null elements + * @throws IllegalArgumentException if some property of any of the elements + * prevents it from being added to this collection + * @throws IllegalStateException if any of the elements cannot be added at this + * time due to insertion restrictions. + * @return true if the collection was modified as a result. + */ + public static boolean addAll(Collection dest, T ... 
elements) { + boolean result = false; + for (final T e : elements) { + result = dest.add(e) | result; + } + return result; + } + /** * Create a constant map that maps each value in values to itself */ diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java index 2f4c1b55d..7f2fe6833 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java @@ -336,13 +336,17 @@ public class ActiveRegion implements HasGenomeLocation { /** * Remove all of the reads in readsToRemove from this active region - * @param readsToRemove the collection of reads we want to remove + * @param readsToRemove the set of reads we want to remove */ - public void removeAll( final Collection readsToRemove ) { - reads.removeAll(readsToRemove); + public void removeAll( final Set readsToRemove ) { + final Iterator it = reads.iterator(); spanIncludingReads = extendedLoc; - for ( final GATKSAMRecord read : reads ) { - spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) ); + while ( it.hasNext() ) { + final GATKSAMRecord read = it.next(); + if ( readsToRemove.contains(read) ) + it.remove(); + else + spanIncludingReads = spanIncludingReads.union( genomeLocParser.createGenomeLoc(read) ); } } diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java index f2bc86dfc..f352bc332 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/BandPassActivityProfile.java @@ -31,7 +31,6 @@ import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocSortedSet; import 
org.broadinstitute.sting.utils.MathUtils; -import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; @@ -108,7 +107,7 @@ public class BandPassActivityProfile extends ActivityProfile { final int bandSize = 2 * filterSize + 1; final double[] kernel = new double[bandSize]; for( int iii = 0; iii < bandSize; iii++ ) { - kernel[iii] = MathUtils.NormalDistribution(filterSize, sigma, iii); + kernel[iii] = MathUtils.normalDistribution(filterSize, sigma, iii); } return MathUtils.normalizeFromRealSpace(kernel); } diff --git a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java index f51881e0b..836c16a7e 100644 --- a/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java +++ b/public/java/src/org/broadinstitute/sting/utils/clipping/ClippingOp.java @@ -35,6 +35,7 @@ import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import java.util.Iterator; +import java.util.List; import java.util.Stack; import java.util.Vector; @@ -193,9 +194,17 @@ public class ClippingOp { unclippedCigar.add(new CigarElement(matchesCount, CigarOperator.MATCH_OR_MISMATCH)); unclipped.setCigar(unclippedCigar); - unclipped.setAlignmentStart(read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar)); + final int newStart = read.getAlignmentStart() + calculateAlignmentStartShift(read.getCigar(), unclippedCigar); + unclipped.setAlignmentStart(newStart); - return unclipped; + if ( newStart <= 0 ) { + // if the start of the unclipped read occurs before the contig, + // we must hard clip away the bases since we cannot represent reads with + // negative or 0 alignment start values in the SAMRecord (e.g., 0 means unaligned) + return hardClip(unclipped, 0, - newStart); + } else { + return unclipped; + } } /** @@ -334,7 +343,24 @@ public class ClippingOp { return newCigar; } - 
@Requires({"start <= stop", "start == 0 || stop == read.getReadLength() - 1"}) + /** + * Hard clip bases from read, from start to stop in base coordinates + * + * If start == 0, then we will clip from the front of the read, otherwise we clip + * from the right. If start == 0 and stop == 10, this would clip out the first + * 10 bases of the read. + * + * Note that this function works with reads with negative alignment starts, in order to + * allow us to hardClip reads that have had their soft clips reverted and so might have + * negative alignment starts + * + * Works properly with reduced reads and insertion/deletion base qualities + * + * @param read a non-null read + * @param start a start >= 0 and < read.length + * @param stop a stop >= 0 and < read.length. + * @return a cloned version of read that has been properly trimmed down + */ private GATKSAMRecord hardClip(GATKSAMRecord read, int start, int stop) { final int firstBaseAfterSoftClips = read.getAlignmentStart() - read.getSoftStart(); final int lastBaseBeforeSoftClips = read.getSoftEnd() - read.getSoftStart(); @@ -342,7 +368,6 @@ public class ClippingOp { if (start == firstBaseAfterSoftClips && stop == lastBaseBeforeSoftClips) // note that if the read has no soft clips, these constants will be 0 and read length - 1 (beauty of math). return GATKSAMRecord.emptyRead(read); - // If the read is unmapped there is no Cigar string and neither should we create a new cigar string CigarShift cigarShift = (read.getReadUnmappedFlag()) ? 
new CigarShift(new Cigar(), 0, 0) : hardClipCigar(read.getCigar(), start, stop); @@ -356,7 +381,7 @@ public class ClippingOp { System.arraycopy(read.getReadBases(), copyStart, newBases, 0, newLength); System.arraycopy(read.getBaseQualities(), copyStart, newQuals, 0, newLength); - GATKSAMRecord hardClippedRead; + final GATKSAMRecord hardClippedRead; try { hardClippedRead = (GATKSAMRecord) read.clone(); } catch (CloneNotSupportedException e) { @@ -559,26 +584,34 @@ public class ClippingOp { return new CigarShift(cleanCigar, shiftFromStart, shiftFromEnd); } + /** + * Compute the offset of the first "real" position in the cigar on the genome + * + * This is defined as a first position after a run of Hs followed by a run of Ss + * + * @param cigar A non-null cigar + * @return the offset (from 0) of the first on-genome base + */ + private int calcHardSoftOffset(final Cigar cigar) { + final List elements = cigar.getCigarElements(); + + int size = 0; + int i = 0; + while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.HARD_CLIP ) { + size += elements.get(i).getLength(); + i++; + } + while ( i < elements.size() && elements.get(i).getOperator() == CigarOperator.SOFT_CLIP ) { + size += elements.get(i).getLength(); + i++; + } + + return size; + } + private int calculateAlignmentStartShift(Cigar oldCigar, Cigar newCigar) { - int newShift = 0; - int oldShift = 0; - - boolean readHasStarted = false; // if the new cigar is composed of S and H only, we have to traverse the entire old cigar to calculate the shift - for (CigarElement cigarElement : newCigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.HARD_CLIP || cigarElement.getOperator() == CigarOperator.SOFT_CLIP) - newShift += cigarElement.getLength(); - else { - readHasStarted = true; - break; - } - } - - for (CigarElement cigarElement : oldCigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.HARD_CLIP || cigarElement.getOperator() == 
CigarOperator.SOFT_CLIP) - oldShift += cigarElement.getLength(); - else if (readHasStarted) - break; - } + final int newShift = calcHardSoftOffset(newCigar); + final int oldShift = calcHardSoftOffset(oldCigar); return newShift - oldShift; } diff --git a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java index 3abe5a7f4..6126116c2 100644 --- a/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java +++ b/public/java/src/org/broadinstitute/sting/utils/exceptions/UserException.java @@ -25,6 +25,7 @@ package org.broadinstitute.sting.utils.exceptions; +import net.sf.samtools.CigarOperator; import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMSequenceDictionary; @@ -87,6 +88,19 @@ public class UserException extends ReviewedStingException { } } + public static class UnsupportedCigarOperatorException extends UserException { + public UnsupportedCigarOperatorException(final CigarOperator co, final SAMRecord read, final String message) { + super(String.format( + "Unsupported CIGAR operator %s in read %s at %s:%d. 
%s", + co, + read.getReadName(), + read.getReferenceName(), + read.getAlignmentStart(), + message)); + } + } + + public static class MalformedGenomeLoc extends UserException { public MalformedGenomeLoc(String message, GenomeLoc loc) { super(String.format("Badly formed genome loc: %s: %s", message, loc)); @@ -457,4 +471,10 @@ public class UserException extends ReviewedStingException { super(message,innerException); } } + + public static class IncompatibleRecalibrationTableParameters extends UserException { + public IncompatibleRecalibrationTableParameters(String s) { + super(s); + } + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index 150e24c51..70be85f54 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -42,13 +42,13 @@ import java.util.*; * For each read, this holds underlying alleles represented by an aligned read, and corresponding relative likelihood. */ public class PerReadAlleleLikelihoodMap { - protected final List alleles; - protected final Map> likelihoodReadMap; + /** A set of all of the allele, so we can efficiently determine if an allele is already present */ + private final Set allelesSet = new HashSet<>(); + /** A list of the unique allele, as an ArrayList so we can call get(i) efficiently */ + protected final List alleles = new ArrayList<>(); + protected final Map> likelihoodReadMap = new LinkedHashMap<>(); - public PerReadAlleleLikelihoodMap() { - likelihoodReadMap = new LinkedHashMap>(); - alleles = new ArrayList(); - } + public PerReadAlleleLikelihoodMap() { } /** * Add a new entry into the Read -> ( Allele -> Likelihood ) map of maps. 
@@ -61,18 +61,20 @@ public class PerReadAlleleLikelihoodMap { if ( a == null ) throw new IllegalArgumentException("Cannot add a null allele to the allele likelihood map"); if ( likelihood == null ) throw new IllegalArgumentException("Likelihood cannot be null"); if ( likelihood > 0.0 ) throw new IllegalArgumentException("Likelihood must be negative (L = log(p))"); + Map likelihoodMap = likelihoodReadMap.get(read); if (likelihoodMap == null){ // LinkedHashMap will ensure iterating through alleles will be in consistent order - likelihoodMap = new LinkedHashMap(); + likelihoodMap = new LinkedHashMap<>(); + likelihoodReadMap.put(read,likelihoodMap); } - likelihoodReadMap.put(read,likelihoodMap); likelihoodMap.put(a,likelihood); - if (!alleles.contains(a)) + if (!allelesSet.contains(a)) { + allelesSet.add(a); alleles.add(a); - + } } public ReadBackedPileup createPerAlleleDownsampledBasePileup(final ReadBackedPileup pileup, final double downsamplingFraction) { @@ -165,6 +167,7 @@ public class PerReadAlleleLikelihoodMap { } public void clear() { + allelesSet.clear(); alleles.clear(); likelihoodReadMap.clear(); } @@ -218,7 +221,7 @@ public class PerReadAlleleLikelihoodMap { final int count = ReadUtils.getMeanRepresentativeReadCount(read); final double likelihood_iii = entry.getValue().get(iii_allele); final double likelihood_jjj = entry.getValue().get(jjj_allele); - haplotypeLikelihood += count * (MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + LOG_ONE_HALF); + haplotypeLikelihood += count * (MathUtils.approximateLog10SumLog10(likelihood_iii, likelihood_jjj) + MathUtils.LOG_ONE_HALF); // fast exit. 
If this diploid pair is already worse than the max, just stop and look at the next pair if ( haplotypeLikelihood < maxElement ) break; @@ -238,7 +241,6 @@ public class PerReadAlleleLikelihoodMap { return new MostLikelyAllele(alleles.get(hap1), alleles.get(hap2), maxElement, maxElement); } - private static final double LOG_ONE_HALF = -Math.log10(2.0); /** * Given a map from alleles to likelihoods, find the allele with the largest likelihood. @@ -319,7 +321,7 @@ public class PerReadAlleleLikelihoodMap { * @return the list of reads removed from this map because they are poorly modelled */ public List filterPoorlyModelledReads(final double maxErrorRatePerBase) { - final List removedReads = new LinkedList(); + final List removedReads = new LinkedList<>(); final Iterator>> it = likelihoodReadMap.entrySet().iterator(); while ( it.hasNext() ) { final Map.Entry> record = it.next(); @@ -354,8 +356,8 @@ public class PerReadAlleleLikelihoodMap { * @return true if none of the log10 likelihoods imply that the read truly originated from one of the haplotypes */ protected boolean readIsPoorlyModelled(final GATKSAMRecord read, final Collection log10Likelihoods, final double maxErrorRatePerBase) { - final double maxErrorsForRead = Math.ceil(read.getReadLength() * maxErrorRatePerBase); - final double log10QualPerBase = -3.0; + final double maxErrorsForRead = Math.min(2.0, Math.ceil(read.getReadLength() * maxErrorRatePerBase)); + final double log10QualPerBase = -4.0; final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase; for ( final double log10Likelihood : log10Likelihoods ) @@ -364,4 +366,12 @@ public class PerReadAlleleLikelihoodMap { return true; } + + /** + * Get an unmodifiable set of the unique alleles in this PerReadAlleleLikelihoodMap + * @return a non-null unmodifiable map + */ + public Set getAllelesSet() { + return Collections.unmodifiableSet(allelesSet); + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java 
b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index bacee7942..1f932b222 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -46,7 +46,6 @@ public class Haplotype extends Allele { private EventMap eventMap = null; private Cigar cigar; private int alignmentStartHapwrtRef; - private Event artificialEvent = null; private double score = 0; /** @@ -93,11 +92,6 @@ public class Haplotype extends Allele { super(allele, true); } - protected Haplotype( final byte[] bases, final Event artificialEvent ) { - this(bases, false); - this.artificialEvent = artificialEvent; - } - public Haplotype( final byte[] bases, final GenomeLoc loc ) { this(bases, false); this.genomeLocation = loc; @@ -189,7 +183,7 @@ public class Haplotype extends Allele { } /** - * Get the cigar for this haplotype. Note that cigar is guarenteed to be consolidated + * Get the cigar for this haplotype. 
Note that the cigar is guaranteed to be consolidated * in that multiple adjacent equal operates will have been merged * @return the cigar of this haplotype */ @@ -223,30 +217,6 @@ public class Haplotype extends Allele { throw new IllegalArgumentException("Read length " + length() + " not equal to the read length of the cigar " + cigar.getReadLength()); } - public boolean isArtificialHaplotype() { - return artificialEvent != null; - } - - public Event getArtificialEvent() { - return artificialEvent; - } - - public Allele getArtificialRefAllele() { - return artificialEvent.ref; - } - - public Allele getArtificialAltAllele() { - return artificialEvent.alt; - } - - public int getArtificialAllelePosition() { - return artificialEvent.pos; - } - - public void setArtificialEvent( final Event artificialEvent ) { - this.artificialEvent = artificialEvent; - } - @Requires({"refInsertLocation >= 0"}) public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) { // refInsertLocation is in ref haplotype offset coordinates NOT genomic coordinates @@ -260,7 +230,7 @@ public class Haplotype extends Allele { newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, 0, haplotypeInsertLocation)); // bases before the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(myBases, haplotypeInsertLocation + refAllele.length(), myBases.length)); // bases after the variant - return new Haplotype(newHaplotypeBases, new Event(refAllele, altAllele, genomicInsertLocation)); + return new Haplotype(newHaplotypeBases); } public static LinkedHashMap makeHaplotypeListFromAlleles(final List alleleList, diff --git a/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java b/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java 
new file mode 100644 index 000000000..1e9a37cb7 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/help/DocletUtils.java @@ -0,0 +1,76 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.help; + +import com.sun.javadoc.FieldDoc; +import com.sun.javadoc.PackageDoc; +import com.sun.javadoc.ProgramElementDoc; +import org.broadinstitute.sting.utils.classloader.JVMUtils; + +import java.lang.reflect.Field; + +/** + * Methods in the class must ONLY be used by doclets, since the com.sun.javadoc.* classes are not + * available on all systems, and we don't want the GATK proper to depend on them. 
+ */ +public class DocletUtils { + + protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) { + try { + Class type = getClassForDoc(classDoc); + return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type)); + } catch (Throwable t) { + // Ignore errors. + return false; + } + } + + protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException { + return Class.forName(getClassName(doc)); + } + + protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) { + try { + Class clazz = getClassForDoc(fieldDoc.containingClass()); + return JVMUtils.findField(clazz, fieldDoc.name()); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + /** + * Reconstitute the class name from the given class JavaDoc object. + * + * @param doc the Javadoc model for the given class. + * @return The (string) class name of the given class. + */ + protected static String getClassName(ProgramElementDoc doc) { + PackageDoc containingPackage = doc.containingPackage(); + return containingPackage.name().length() > 0 ? + String.format("%s.%s", containingPackage.name(), doc.name()) : + String.format("%s", doc.name()); + } +} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java b/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java index 677bbf2e5..63cb0900a 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/GATKDoclet.java @@ -352,7 +352,7 @@ public class GATKDoclet { private Class getClassForClassDoc(ClassDoc doc) { try { // todo -- what do I need the ? extends Object to pass the compiler? 
- return (Class) HelpUtils.getClassForDoc(doc); + return (Class) DocletUtils.getClassForDoc(doc); } catch (ClassNotFoundException e) { //logger.warn("Couldn't find class for ClassDoc " + doc); // we got a classdoc for a class we can't find. Maybe in a library or something diff --git a/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java b/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java index 1711a3923..02c269495 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/GenericDocumentationHandler.java @@ -68,7 +68,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { @Override public boolean includeInDocs(ClassDoc doc) { try { - Class type = HelpUtils.getClassForDoc(doc); + Class type = DocletUtils.getClassForDoc(doc); boolean hidden = !getDoclet().showHiddenFeatures() && type.isAnnotationPresent(Hidden.class); return !hidden && JVMUtils.isConcrete(type); } catch (ClassNotFoundException e) { @@ -157,7 +157,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { root.put("arguments", args); try { // loop over all of the arguments according to the parsing engine - for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(HelpUtils.getClassForDoc(toProcess.classDoc))) { + for (final ArgumentSource argumentSource : parsingEngine.extractArgumentSources(DocletUtils.getClassForDoc(toProcess.classDoc))) { // todo -- why can you have multiple ones? 
ArgumentDefinition argDef = argumentSource.createArgumentDefinitions().get(0); FieldDoc fieldDoc = getFieldDoc(toProcess.classDoc, argumentSource.field.getName()); @@ -663,7 +663,7 @@ public class GenericDocumentationHandler extends DocumentedGATKFeatureHandler { if (fieldDoc.name().equals(name)) return fieldDoc; - Field field = HelpUtils.getFieldForFieldDoc(fieldDoc); + Field field = DocletUtils.getFieldForFieldDoc(fieldDoc); if (field == null) throw new RuntimeException("Could not find the field corresponding to " + fieldDoc + ", presumably because the field is inaccessible"); if (field.isAnnotationPresent(ArgumentCollection.class)) { diff --git a/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java b/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java index 9a23fd022..74516672d 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/HelpUtils.java @@ -25,57 +25,20 @@ package org.broadinstitute.sting.utils.help; -import com.sun.javadoc.FieldDoc; -import com.sun.javadoc.PackageDoc; -import com.sun.javadoc.ProgramElementDoc; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.AnnotationType; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.GenotypeAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnotation; import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation; -import org.broadinstitute.sting.utils.classloader.JVMUtils; import org.broadinstitute.sting.utils.classloader.PluginManager; -import java.lang.reflect.Field; import java.util.List; +/** + * NON-javadoc/doclet help-related utility methods should go here. Anything with a com.sun.javadoc.* dependency + * should go into DocletUtils for use only by doclets. 
+ */ public class HelpUtils { - protected static boolean assignableToClass(ProgramElementDoc classDoc, Class lhsClass, boolean requireConcrete) { - try { - Class type = getClassForDoc(classDoc); - return lhsClass.isAssignableFrom(type) && (!requireConcrete || JVMUtils.isConcrete(type)); - } catch (Throwable t) { - // Ignore errors. - return false; - } - } - - protected static Class getClassForDoc(ProgramElementDoc doc) throws ClassNotFoundException { - return Class.forName(getClassName(doc)); - } - - protected static Field getFieldForFieldDoc(FieldDoc fieldDoc) { - try { - Class clazz = getClassForDoc(fieldDoc.containingClass()); - return JVMUtils.findField(clazz, fieldDoc.name()); - } catch (ClassNotFoundException e) { - throw new RuntimeException(e); - } - } - - /** - * Reconstitute the class name from the given class JavaDoc object. - * - * @param doc the Javadoc model for the given class. - * @return The (string) class name of the given class. - */ - protected static String getClassName(ProgramElementDoc doc) { - PackageDoc containingPackage = doc.containingPackage(); - return containingPackage.name().length() > 0 ? - String.format("%s.%s", containingPackage.name(), doc.name()) : - String.format("%s", doc.name()); - } - /** * Simple method to print a list of available annotations. 
*/ @@ -98,5 +61,4 @@ public class HelpUtils { System.out.println("\t" + c.getSimpleName()); System.out.println(); } - -} \ No newline at end of file +} diff --git a/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java b/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java index 0f2383b4b..ac85d7aff 100644 --- a/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java +++ b/public/java/src/org/broadinstitute/sting/utils/help/ResourceBundleExtractorDoclet.java @@ -108,7 +108,7 @@ public class ResourceBundleExtractorDoclet { if(isRequiredJavadocMissing(currentClass) && isWalker(currentClass)) undocumentedWalkers.add(currentClass.name()); - renderHelpText(HelpUtils.getClassName(currentClass),currentClass); + renderHelpText(DocletUtils.getClassName(currentClass),currentClass); } for(PackageDoc currentPackage: packages) @@ -173,7 +173,7 @@ public class ResourceBundleExtractorDoclet { * @return True if the class of the given name is a walker. False otherwise. */ protected static boolean isWalker(ClassDoc classDoc) { - return HelpUtils.assignableToClass(classDoc, Walker.class, true); + return DocletUtils.assignableToClass(classDoc, Walker.class, true); } /** diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java index c4b566582..86f3500be 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/AlignmentStateMachine.java @@ -123,6 +123,15 @@ public class AlignmentStateMachine { return getRead().getReferenceIndex(); } + /** + * Is our read a reduced read? 
+ * + * @return true if the read we encapsulate is a reduced read, otherwise false + */ + public boolean isReducedRead() { + return read.isReducedRead(); + } + /** * Is this the left edge state? I.e., one that is before or after the current read? * @return true if this state is an edge state, false otherwise diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java index 2caaf9d27..669e76adc 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/PerSampleReadStateManager.java @@ -167,7 +167,7 @@ final class PerSampleReadStateManager implements Iterable // use returned List directly rather than make a copy, for efficiency's sake readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems()); - levelingDownsampler.reset(); + levelingDownsampler.resetStats(); } return nStatesAdded; diff --git a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java index 49a8d10aa..9122beebb 100644 --- a/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java +++ b/public/java/src/org/broadinstitute/sting/utils/locusiterator/SamplePartitioner.java @@ -164,8 +164,8 @@ class SamplePartitioner { @Ensures("doneSubmittingReads == false") public void reset() { for ( final Downsampler downsampler : readsBySample.values() ) { - downsampler.clear(); - downsampler.reset(); + downsampler.clearItems(); + downsampler.resetStats(); } doneSubmittingReads = false; } diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java index ab6c321e8..ddc1a4559 100644 --- 
a/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/Log10PairHMM.java @@ -38,7 +38,7 @@ import java.util.Arrays; * User: rpoplin, carneiro * Date: 3/1/12 */ -public final class Log10PairHMM extends PairHMM { +public final class Log10PairHMM extends N2MemoryPairHMM { /** * Should we use exact log10 calculation (true), or an approximation (false)? */ diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java new file mode 100644 index 000000000..a091a0716 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/N2MemoryPairHMM.java @@ -0,0 +1,91 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.utils.pairhmm; + +import com.google.java.contract.Requires; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.MathUtils; + +import java.util.Arrays; + +/** + * Superclass for PairHMM that want to use a full read x haplotype matrix for their match, insertion, and deletion matrix + * + * User: rpoplin + * Date: 10/16/12 + */ +abstract class N2MemoryPairHMM extends PairHMM { + protected double[][] transition = null; // The transition probabilities cache + protected double[][] prior = null; // The prior probabilities cache + protected double[][] matchMatrix = null; + protected double[][] insertionMatrix = null; + protected double[][] deletionMatrix = null; + + /** + * Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths + * + * Note: Do not worry about padding, just provide the true max length of the read and haplotype. The HMM will take care of the padding. + * + * @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM + * @param readMaxLength the max length of reads we want to use with this PairHMM + */ + public void initialize( final int readMaxLength, final int haplotypeMaxLength ) { + super.initialize(readMaxLength, haplotypeMaxLength); + + matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; + } + + /** + * Print out the core hmm matrices for debugging + */ + protected void dumpMatrices() { + dumpMatrix("matchMetricArray", matchMatrix); + dumpMatrix("insertionMatrix", insertionMatrix); + dumpMatrix("deletionMatrix", deletionMatrix); + } + + /** + * Print out in a human readable form the matrix for debugging + * @param name the name of this matrix + * @param matrix the matrix of values + */ + @Requires({"name != null", "matrix != null"}) + private void 
dumpMatrix(final String name, final double[][] matrix) { + System.out.printf("%s%n", name); + for ( int i = 0; i < matrix.length; i++) { + System.out.printf("\t%s[%d]", name, i); + for ( int j = 0; j < matrix[i].length; j++ ) { + if ( Double.isInfinite(matrix[i][j]) ) + System.out.printf(" %15s", String.format("%f", matrix[i][j])); + else + System.out.printf(" % 15.5e", matrix[i][j]); + } + System.out.println(); + } + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java b/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java index 6b57a1354..85ac97f95 100644 --- a/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java +++ b/public/java/src/org/broadinstitute/sting/utils/pairhmm/PairHMM.java @@ -40,8 +40,6 @@ import java.util.Arrays; public abstract class PairHMM { protected final static Logger logger = Logger.getLogger(PairHMM.class); - protected double[][] transition = null; // The transition probabilities cache - protected double[][] prior = null; // The prior probabilities cache protected boolean constantsAreInitialized = false; protected byte[] previousHaplotypeBases; @@ -52,12 +50,9 @@ public abstract class PairHMM { /* PairHMM as implemented for the UnifiedGenotyper. 
Uses log10 sum functions accurate to only 1E-4 */ ORIGINAL, /* Optimized version of the PairHMM which caches per-read computations and operations in real space to avoid costly sums of log10'ed likelihoods */ - LOGLESS_CACHING + LOGLESS_CACHING, } - protected double[][] matchMatrix = null; - protected double[][] insertionMatrix = null; - protected double[][] deletionMatrix = null; protected int maxHaplotypeLength, maxReadLength; protected int paddedMaxReadLength, paddedMaxHaplotypeLength; protected int paddedReadLength, paddedHaplotypeLength; @@ -82,18 +77,12 @@ public abstract class PairHMM { paddedMaxReadLength = readMaxLength + 1; paddedMaxHaplotypeLength = haplotypeMaxLength + 1; - matchMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - insertionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - deletionMatrix = new double[paddedMaxReadLength][paddedMaxHaplotypeLength]; - previousHaplotypeBases = null; constantsAreInitialized = false; initialized = true; } - - /** * Compute the total probability of read arising from haplotypeBases given base substitution, insertion, and deletion * probabilities. 
@@ -152,44 +141,15 @@ public abstract class PairHMM { * To be overloaded by subclasses to actually do calculation for #computeReadLikelihoodGivenHaplotypeLog10 */ @Requires({"readBases.length == readQuals.length", "readBases.length == insertionGOP.length", "readBases.length == deletionGOP.length", - "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"}) + "readBases.length == overallGCP.length", "matchMatrix!=null", "insertionMatrix!=null", "deletionMatrix!=null"}) protected abstract double subComputeReadLikelihoodGivenHaplotypeLog10( final byte[] haplotypeBases, - final byte[] readBases, - final byte[] readQuals, - final byte[] insertionGOP, - final byte[] deletionGOP, - final byte[] overallGCP, - final int hapStartIndex, - final boolean recacheReadValues ); - - /** - * Print out the core hmm matrices for debugging - */ - protected void dumpMatrices() { - dumpMatrix("matchMetricArray", matchMatrix); - dumpMatrix("insertionMatrix", insertionMatrix); - dumpMatrix("deletionMatrix", deletionMatrix); - } - - /** - * Print out in a human readable form the matrix for debugging - * @param name the name of this matrix - * @param matrix the matrix of values - */ - @Requires({"name != null", "matrix != null"}) - private void dumpMatrix(final String name, final double[][] matrix) { - System.out.printf("%s%n", name); - for ( int i = 0; i < matrix.length; i++) { - System.out.printf("\t%s[%d]", name, i); - for ( int j = 0; j < matrix[i].length; j++ ) { - if ( Double.isInfinite(matrix[i][j]) ) - System.out.printf(" %15s", String.format("%f", matrix[i][j])); - else - System.out.printf(" % 15.5e", matrix[i][j]); - } - System.out.println(); - } - } + final byte[] readBases, + final byte[] readQuals, + final byte[] insertionGOP, + final byte[] deletionGOP, + final byte[] overallGCP, + final int hapStartIndex, + final boolean recacheReadValues ); /** * Compute the first position at which two haplotypes differ diff --git 
a/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java b/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java index 8aafd6034..659523641 100644 --- a/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java +++ b/public/java/src/org/broadinstitute/sting/utils/runtime/ProcessSettings.java @@ -25,7 +25,6 @@ package org.broadinstitute.sting.utils.runtime; -import com.sun.corba.se.spi.orbutil.fsm.Input; import java.io.File; import java.util.Map; diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java index fa35e3f53..762ce4858 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java @@ -800,6 +800,23 @@ public final class AlignmentUtils { return new Cigar(elements); } + /** + * Removing a trailing deletion from the incoming cigar if present + * + * @param c the cigar we want to update + * @return a non-null Cigar + */ + @Requires("c != null") + @Ensures("result != null") + public static Cigar removeTrailingDeletions(final Cigar c) { + + final List elements = c.getCigarElements(); + if ( elements.get(elements.size() - 1).getOperator() != CigarOperator.D ) + return c; + + return new Cigar(elements.subList(0, elements.size() - 1)); + } + /** * Move the indel in a given cigar string one base to the left * diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java index bf3045c71..8d496ab96 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialBAMBuilder.java @@ -182,6 +182,11 @@ public class ArtificialBAMBuilder { try { final File file = File.createTempFile("tempBAM", ".bam"); file.deleteOnExit(); + 
+ // Register the bam index file for deletion on exit as well: + new File(file.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit(); + new File(file.getAbsolutePath() + ".bai").deleteOnExit(); + return makeBAMFile(file); } catch ( IOException e ) { throw new RuntimeException(e); diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java index b8367a7df..055f8630b 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ArtificialSAMUtils.java @@ -324,6 +324,31 @@ public class ArtificialSAMUtils { return Arrays.asList(left, right); } + /** + * Create an artificial reduced read based on the parameters. The cigar string will be *M, where * is the + * length of the read. The base counts specified in the baseCounts array will be stored fully encoded in + * the RR attribute. + * + * @param header the SAM header to associate the read with + * @param name the name of the read + * @param refIndex the reference index, i.e. what chromosome to associate it with + * @param alignmentStart where to start the alignment + * @param length the length of the read + * @param baseCounts reduced base counts to encode in the RR attribute; length must match the read length + * @return the artificial reduced read + */ + public static GATKSAMRecord createArtificialReducedRead( final SAMFileHeader header, + final String name, + final int refIndex, + final int alignmentStart, + final int length, + final int[] baseCounts ) { + final GATKSAMRecord read = createArtificialRead(header, name, refIndex, alignmentStart, length); + read.setReducedReadCounts(baseCounts); + read.setReducedReadCountsTag(); + return read; + } + /** * Create a collection of identical artificial reads based on the parameters. The cigar string for each * read will be *M, where * is the length of the read. 
diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java index 0db3aa043..f9393cc4b 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/ReadUtils.java @@ -36,6 +36,7 @@ import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.NGSPlatform; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.exceptions.UserException; import java.io.File; import java.util.*; @@ -152,11 +153,18 @@ public class ReadUtils { * @return a SAMFileWriter with the compression level if it is a bam. */ public static SAMFileWriter createSAMFileWriterWithCompression(SAMFileHeader header, boolean presorted, String file, int compression) { + validateCompressionLevel(compression); if (file.endsWith(".bam")) return new SAMFileWriterFactory().makeBAMWriter(header, presorted, new File(file), compression); return new SAMFileWriterFactory().makeSAMOrBAMWriter(header, presorted, new File(file)); } + public static int validateCompressionLevel(final int requestedCompressionLevel) { + if ( requestedCompressionLevel < 0 || requestedCompressionLevel > 9 ) + throw new UserException.BadArgumentValue("compress", "Compression level must be 0-9 but got " + requestedCompressionLevel); + return requestedCompressionLevel; + } + /** * is this base inside the adaptor of the read? * @@ -424,9 +432,9 @@ public class ReadUtils { // clipping the left tail and first base is insertion, go to the next read coordinate // with the same reference coordinate. Advance to the next cigar element, or to the // end of the read if there is no next element. 
- Pair firstElementIsInsertion = readStartsWithInsertion(cigar); - if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion.getFirst()) - readCoord = Math.min(firstElementIsInsertion.getSecond().getLength(), cigar.getReadLength() - 1); + final CigarElement firstElementIsInsertion = readStartsWithInsertion(cigar); + if (readCoord == 0 && tail == ClippingTail.LEFT_TAIL && firstElementIsInsertion != null) + readCoord = Math.min(firstElementIsInsertion.getLength(), cigar.getReadLength() - 1); return readCoord; } @@ -595,25 +603,28 @@ public class ReadUtils { } /** - * Checks if a read starts with an insertion. It looks beyond Hard and Soft clips - * if there are any. - * - * @param read - * @return A pair with the answer (true/false) and the element or null if it doesn't exist + * @see #readStartsWithInsertion(net.sf.samtools.Cigar, boolean) with ignoreClipOps set to true */ - public static Pair readStartsWithInsertion(GATKSAMRecord read) { - return readStartsWithInsertion(read.getCigar()); + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead) { + return readStartsWithInsertion(cigarForRead, true); } - public static Pair readStartsWithInsertion(final Cigar cigar) { - for (CigarElement cigarElement : cigar.getCigarElements()) { - if (cigarElement.getOperator() == CigarOperator.INSERTION) - return new Pair(true, cigarElement); + /** + * Checks if a read starts with an insertion. + * + * @param cigarForRead the CIGAR to evaluate + * @param ignoreSoftClipOps should we ignore S operators when evaluating whether an I operator is at the beginning? Note that H operators are always ignored. 
+ * @return the element if it's a leading insertion or null otherwise + */ + public static CigarElement readStartsWithInsertion(final Cigar cigarForRead, final boolean ignoreSoftClipOps) { + for ( final CigarElement cigarElement : cigarForRead.getCigarElements() ) { + if ( cigarElement.getOperator() == CigarOperator.INSERTION ) + return cigarElement; - else if (cigarElement.getOperator() != CigarOperator.HARD_CLIP && cigarElement.getOperator() != CigarOperator.SOFT_CLIP) + else if ( cigarElement.getOperator() != CigarOperator.HARD_CLIP && ( !ignoreSoftClipOps || cigarElement.getOperator() != CigarOperator.SOFT_CLIP) ) break; } - return new Pair(false, null); + return null; } /** diff --git a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java index 84c33d4a5..1abf9f836 100644 --- a/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java +++ b/public/java/src/org/broadinstitute/sting/utils/smithwaterman/SWPairwiseAlignment.java @@ -118,6 +118,21 @@ public class SWPairwiseAlignment implements SmithWaterman { align(seq1,seq2); } + /** + * Create a new SW pairwise aligner + * + * After creating the object the two sequences are aligned with an internal call to align(seq1, seq2) + * + * @param seq1 the first sequence we want to align + * @param seq2 the second sequence we want to align + * @param strategy the overhang strategy to use + */ + public SWPairwiseAlignment(final byte[] seq1, final byte[] seq2, final OVERHANG_STRATEGY strategy) { + this(SWParameterSet.ORIGINAL_DEFAULT.parameters); + overhang_strategy = strategy; + align(seq1, seq2); + } + /** * Create a new SW pairwise aligner, without actually doing any alignment yet * diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java index 0fba432e7..09db585a6 100644 
--- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVCFUtils.java @@ -30,10 +30,10 @@ import org.broad.tribble.FeatureCodec; import org.broad.tribble.FeatureCodecHeader; import org.broad.tribble.readers.PositionalBufferedStream; import org.broadinstitute.sting.commandline.RodBinding; +import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.utils.collections.Pair; -import org.broadinstitute.variant.bcf2.BCF2Codec; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.vcf.*; @@ -52,6 +52,31 @@ public class GATKVCFUtils { */ private GATKVCFUtils() { } + public final static String GATK_COMMAND_LINE_KEY = "GATKCommandLine"; + + /** + * Gets the appropriately formatted header for a VCF file describing this GATK run + * + * @param engine the GATK engine that holds the walker name, GATK version, and other information + * @param argumentSources contains information on the argument values provided to the GATK for converting to a + * command line string. Should be provided from the data in the parsing engine. Can be + * empty in which case the command line will be the empty string. + * @return VCF header line describing this run of the GATK. 
+ */ + public static VCFHeaderLine getCommandLineArgumentHeaderLine(final GenomeAnalysisEngine engine, final Collection argumentSources) { + if ( engine == null ) throw new IllegalArgumentException("engine cannot be null"); + if ( argumentSources == null ) throw new IllegalArgumentException("argumentSources cannot be null"); + + final Map attributes = new LinkedHashMap<>(); + attributes.put("ID", engine.getWalkerName()); + attributes.put("Version", CommandLineGATK.getVersionNumber()); + final Date date = new Date(); + attributes.put("Date", date.toString()); + attributes.put("Epoch", Long.toString(date.getTime())); + attributes.put("CommandLineOptions", engine.createApproximateCommandLineArgumentString(argumentSources.toArray())); + return new VCFSimpleHeaderLine(GATK_COMMAND_LINE_KEY, attributes, Collections.emptyList()); + } + public static Map getVCFHeadersFromRods(GenomeAnalysisEngine toolkit, List> rodBindings) { // Collect the eval rod names final Set names = new TreeSet(); @@ -149,21 +174,6 @@ public class GATKVCFUtils { return VCFUtils.withUpdatedContigs(header, engine.getArguments().referenceFile, engine.getMasterSequenceDictionary()); } - public static String rsIDOfFirstRealVariant(List VCs, VariantContext.Type type) { - if ( VCs == null ) - return null; - - String rsID = null; - for ( VariantContext vc : VCs ) { - if ( vc.getType() == type ) { - rsID = vc.getID(); - break; - } - } - - return rsID; - } - /** * Utility class to read all of the VC records from a file * diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java index 4565402b9..3bc5da82f 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java @@ -45,7 +45,7 @@ public class GATKVariantContextUtils { public static final int DEFAULT_PLOIDY = 2; 
public static final double SUM_GL_THRESH_NOCALL = -0.1; // if sum(gl) is bigger than this threshold, we treat GL's as non-informative and will force a no-call. - private static final List NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL); + protected static final List NO_CALL_ALLELES = Arrays.asList(Allele.NO_CALL, Allele.NO_CALL); public final static String MERGE_FILTER_PREFIX = "filterIn"; public final static String MERGE_REF_IN_ALL = "ReferenceInAll"; public final static String MERGE_FILTER_IN_ALL = "FilteredInAll"; @@ -421,6 +421,37 @@ public class GATKVariantContextUtils { return true; // we passed all tests, we matched } + public enum GenotypeAssignmentMethod { + /** + * set all of the genotype GT values to NO_CALL + */ + SET_TO_NO_CALL, + + /** + * Use the subsetted PLs to greedily assigned genotypes + */ + USE_PLS_TO_ASSIGN, + + /** + * Try to match the original GT calls, if at all possible + * + * Suppose I have 3 alleles: A/B/C and the following samples: + * + * original_GT best_match to A/B best_match to A/C + * S1 => A/A A/A A/A + * S2 => A/B A/B A/A + * S3 => B/B B/B A/A + * S4 => B/C A/B A/C + * S5 => C/C A/A C/C + * + * Basically, all alleles not in the subset map to ref. It means that het-alt genotypes + * when split into 2 bi-allelic variants will be het in each, which is good in some cases, + * rather than the undetermined behavior when using the PLs to assign, which could result + * in hom-var or hom-ref for each, depending on the exact PL values. 
+ */ + BEST_MATCH_TO_ORIGINAL + } + /** * subset the Variant Context to the specific set of alleles passed in (pruning the PLs appropriately) * @@ -430,22 +461,23 @@ public class GATKVariantContextUtils { * @return genotypes */ public static GenotypesContext subsetDiploidAlleles(final VariantContext vc, - final List allelesToUse, - final boolean assignGenotypes) { + final List allelesToUse, + final GenotypeAssignmentMethod assignGenotypes) { + if ( allelesToUse.get(0).isNonReference() ) throw new IllegalArgumentException("First allele must be the reference allele"); + if ( allelesToUse.size() == 1 ) throw new IllegalArgumentException("Cannot subset to only 1 alt allele"); // the genotypes with PLs final GenotypesContext oldGTs = vc.getGenotypes(); // the new genotypes to create final GenotypesContext newGTs = GenotypesContext.create(); + // optimization: if no input genotypes, just exit - if (oldGTs.isEmpty()) - return newGTs; + if (oldGTs.isEmpty()) return newGTs; // samples final List sampleIndices = oldGTs.getSampleNamesOrderedByName(); - // we need to determine which of the alternate alleles (and hence the likelihoods) to use and carry forward final int numOriginalAltAlleles = vc.getAlternateAlleles().size(); final int expectedNumLikelihoods = GenotypeLikelihoods.numLikelihoods(vc.getNAlleles(), 2); @@ -456,8 +488,8 @@ public class GATKVariantContextUtils { // an optimization: if we are supposed to use all (or none in the case of a ref call) of the alleles, // then we can keep the PLs as is; otherwise, we determine which ones to keep - if ( numNewAltAlleles != numOriginalAltAlleles && numNewAltAlleles > 0 ) { - likelihoodIndexesToUse = new ArrayList(30); + if ( numNewAltAlleles != numOriginalAltAlleles ) { + likelihoodIndexesToUse = new ArrayList<>(30); final boolean[] altAlleleIndexToUse = new boolean[numOriginalAltAlleles]; for ( int i = 0; i < numOriginalAltAlleles; i++ ) { @@ -478,55 +510,127 @@ public class GATKVariantContextUtils { // create the new 
genotypes for ( int k = 0; k < oldGTs.size(); k++ ) { final Genotype g = oldGTs.get(sampleIndices.get(k)); - if ( !g.hasLikelihoods() ) { - newGTs.add(GenotypeBuilder.create(g.getSampleName(), NO_CALL_ALLELES)); - continue; - } + final GenotypeBuilder gb = new GenotypeBuilder(g); // create the new likelihoods array from the alleles we are allowed to use - final double[] originalLikelihoods = g.getLikelihoods().getAsVector(); double[] newLikelihoods; - if ( likelihoodIndexesToUse == null ) { - newLikelihoods = originalLikelihoods; - } else if ( originalLikelihoods.length != expectedNumLikelihoods ) { - logger.warn("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods); + if ( !g.hasLikelihoods() ) { + // we don't have any likelihoods, so we null out PLs and make G ./. newLikelihoods = null; + gb.noPL(); } else { - newLikelihoods = new double[likelihoodIndexesToUse.size()]; - int newIndex = 0; - for ( int oldIndex : likelihoodIndexesToUse ) - newLikelihoods[newIndex++] = originalLikelihoods[oldIndex]; + final double[] originalLikelihoods = g.getLikelihoods().getAsVector(); + if ( likelihoodIndexesToUse == null ) { + newLikelihoods = originalLikelihoods; + } else if ( originalLikelihoods.length != expectedNumLikelihoods ) { + logger.warn("Wrong number of likelihoods in sample " + g.getSampleName() + " at " + vc + " got " + g.getLikelihoodsString() + " but expected " + expectedNumLikelihoods); + newLikelihoods = null; + } else { + newLikelihoods = new double[likelihoodIndexesToUse.size()]; + int newIndex = 0; + for ( int oldIndex : likelihoodIndexesToUse ) + newLikelihoods[newIndex++] = originalLikelihoods[oldIndex]; - // might need to re-normalize - newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true); - } + // might need to re-normalize + newLikelihoods = MathUtils.normalizeFromLog10(newLikelihoods, false, true); + } - // if there is no mass on 
the (new) likelihoods, then just no-call the sample - if ( newLikelihoods != null && MathUtils.sum(newLikelihoods) > SUM_GL_THRESH_NOCALL ) { - newGTs.add(GenotypeBuilder.create(g.getSampleName(), NO_CALL_ALLELES)); - } - else { - final GenotypeBuilder gb = new GenotypeBuilder(g); - - if ( newLikelihoods == null || numNewAltAlleles == 0 ) + if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) gb.noPL(); else gb.PL(newLikelihoods); - - // if we weren't asked to assign a genotype, then just no-call the sample - if ( !assignGenotypes || MathUtils.sum(newLikelihoods) > SUM_GL_THRESH_NOCALL ) { - gb.alleles(NO_CALL_ALLELES); - } - else { - // find the genotype with maximum likelihoods - int PLindex = numNewAltAlleles == 0 ? 0 : MathUtils.maxElementIndex(newLikelihoods); - GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex); - - gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2))); - if ( numNewAltAlleles != 0 ) gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods)); - } - newGTs.add(gb.make()); } + + updateGenotypeAfterSubsetting(g.getAlleles(), gb, assignGenotypes, newLikelihoods, allelesToUse); + newGTs.add(gb.make()); + } + + return newGTs; + } + + private static boolean likelihoodsAreUninformative(final double[] likelihoods) { + return MathUtils.sum(likelihoods) > SUM_GL_THRESH_NOCALL; + } + + /** + * Add the genotype call (GT) field to GenotypeBuilder using the requested algorithm assignmentMethod + * + * @param originalGT the original genotype calls, cannot be null + * @param gb the builder where we should put our newly called alleles, cannot be null + * @param assignmentMethod the method to use to do the assignment, cannot be null + * @param newLikelihoods a vector of likelihoods to use if the method requires PLs, should be log10 likelihoods, cannot be null + * @param allelesToUse the alleles we are using 
for our subsetting + */ + protected static void updateGenotypeAfterSubsetting(final List originalGT, + final GenotypeBuilder gb, + final GenotypeAssignmentMethod assignmentMethod, + final double[] newLikelihoods, + final List allelesToUse) { + gb.noAD(); + switch ( assignmentMethod ) { + case SET_TO_NO_CALL: + gb.alleles(NO_CALL_ALLELES); + gb.noGQ(); + break; + case USE_PLS_TO_ASSIGN: + if ( newLikelihoods == null || likelihoodsAreUninformative(newLikelihoods) ) { + // if there is no mass on the (new) likelihoods, then just no-call the sample + gb.alleles(NO_CALL_ALLELES); + gb.noGQ(); + } else { + // find the genotype with maximum likelihoods + final int PLindex = MathUtils.maxElementIndex(newLikelihoods); + GenotypeLikelihoods.GenotypeLikelihoodsAllelePair alleles = GenotypeLikelihoods.getAllelePair(PLindex); + gb.alleles(Arrays.asList(allelesToUse.get(alleles.alleleIndex1), allelesToUse.get(alleles.alleleIndex2))); + gb.log10PError(GenotypeLikelihoods.getGQLog10FromLikelihoods(PLindex, newLikelihoods)); + } + break; + case BEST_MATCH_TO_ORIGINAL: + final List best = new LinkedList<>(); + final Allele ref = allelesToUse.get(0); // WARNING -- should be checked in input argument + for ( final Allele originalAllele : originalGT ) { + best.add(allelesToUse.contains(originalAllele) ? 
originalAllele : ref); + } + gb.noGQ(); + gb.noPL(); + gb.alleles(best); + break; + } + } + + /** + * Subset the samples in VC to reference only information with ref call alleles + * + * Preserves DP if present + * + * @param vc the variant context to subset down to + * @param ploidy ploidy to use if a genotype doesn't have any alleles + * @return a GenotypesContext + */ + public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) { + if ( vc == null ) throw new IllegalArgumentException("vc cannot be null"); + if ( ploidy < 1 ) throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy); + + // the genotypes with PLs + final GenotypesContext oldGTs = vc.getGenotypes(); + + // optimization: if no input genotypes, just exit + if (oldGTs.isEmpty()) return oldGTs; + + // the new genotypes to create + final GenotypesContext newGTs = GenotypesContext.create(); + + final Allele ref = vc.getReference(); + final List diploidRefAlleles = Arrays.asList(ref, ref); + + // create the new genotypes + for ( final Genotype g : vc.getGenotypes() ) { + final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy(); + final List refAlleles = gPloidy == 2 ? 
diploidRefAlleles : Collections.nCopies(gPloidy, ref); + final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles); + if ( g.hasDP() ) gb.DP(g.getDP()); + if ( g.hasGQ() ) gb.GQ(g.getGQ()); + newGTs.add(gb.make()); } return newGTs; @@ -539,7 +643,7 @@ public class GATKVariantContextUtils { * @return genotypes context */ public static GenotypesContext assignDiploidGenotypes(final VariantContext vc) { - return subsetDiploidAlleles(vc, vc.getAlleles(), true); + return subsetDiploidAlleles(vc, vc.getAlleles(), GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN); } /** @@ -557,7 +661,7 @@ public class GATKVariantContextUtils { * @return a list of bi-allelic (or monomorphic) variant context */ public static List splitVariantContextToBiallelics(final VariantContext vc) { - return splitVariantContextToBiallelics(vc, false); + return splitVariantContextToBiallelics(vc, false, GenotypeAssignmentMethod.SET_TO_NO_CALL); } /** @@ -575,18 +679,18 @@ public class GATKVariantContextUtils { * @param trimLeft if true, we will also left trim alleles, potentially moving the resulting vcs forward on the genome * @return a list of bi-allelic (or monomorphic) variant context */ - public static List splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft) { + public static List splitVariantContextToBiallelics(final VariantContext vc, final boolean trimLeft, final GenotypeAssignmentMethod genotypeAssignmentMethod) { if ( ! 
vc.isVariant() || vc.isBiallelic() ) // non variant or biallelics already satisfy the contract return Collections.singletonList(vc); else { - final List biallelics = new LinkedList(); + final List biallelics = new LinkedList<>(); for ( final Allele alt : vc.getAlternateAlleles() ) { VariantContextBuilder builder = new VariantContextBuilder(vc); final List alleles = Arrays.asList(vc.getReference(), alt); builder.alleles(alleles); - builder.genotypes(subsetDiploidAlleles(vc, alleles, false)); + builder.genotypes(subsetDiploidAlleles(vc, alleles, genotypeAssignmentMethod)); VariantContextUtils.calculateChromosomeCounts(builder, true); final VariantContext trimmed = trimAlleles(builder.make(), trimLeft, true); biallelics.add(trimmed); @@ -697,6 +801,7 @@ public class GATKVariantContextUtils { int maxAC = -1; final Map attributesWithMaxAC = new LinkedHashMap(); double log10PError = CommonInfo.NO_LOG10_PERROR; + boolean anyVCHadFiltersApplied = false; VariantContext vcWithMaxAC = null; GenotypesContext genotypes = GenotypesContext.create(); @@ -729,6 +834,7 @@ public class GATKVariantContextUtils { log10PError = vc.getLog10PError(); filters.addAll(vc.getFilters()); + anyVCHadFiltersApplied |= vc.filtersWereApplied(); // // add attributes @@ -841,7 +947,9 @@ public class GATKVariantContextUtils { builder.alleles(alleles); builder.genotypes(genotypes); builder.log10PError(log10PError); - builder.filters(filters.isEmpty() ? filters : new TreeSet(filters)); + if ( anyVCHadFiltersApplied ) { + builder.filters(filters.isEmpty() ? filters : new TreeSet<>(filters)); + } builder.attributes(new TreeMap(mergeInfoWithMaxAC ? 
attributesWithMaxAC : attributes)); // Trim the padded bases of all alleles if necessary diff --git a/public/java/test/org/broadinstitute/sting/WalkerTest.java b/public/java/test/org/broadinstitute/sting/WalkerTest.java index 40f1f7bcd..78f67967b 100644 --- a/public/java/test/org/broadinstitute/sting/WalkerTest.java +++ b/public/java/test/org/broadinstitute/sting/WalkerTest.java @@ -220,7 +220,7 @@ public class WalkerTest extends BaseTest { String args = this.args; if ( includeImplicitArgs ) { args = args + (ENABLE_PHONE_HOME_FOR_TESTS ? - String.format(" -et %s ", GATKRunReport.PhoneHomeOption.STANDARD) : + String.format(" -et %s ", GATKRunReport.PhoneHomeOption.AWS) : String.format(" -et %s -K %s ", GATKRunReport.PhoneHomeOption.NO_ET, gatkKeyFile)); if ( includeShadowBCF && GENERATE_SHADOW_BCF ) args = args + " --generateShadowBCF "; @@ -312,6 +312,10 @@ public class WalkerTest extends BaseTest { for (int i = 0; i < spec.nOutputFiles; i++) { String ext = spec.exts == null ? ".tmp" : "." 
+ spec.exts.get(i); File fl = createTempFile(String.format("walktest.tmp_param.%d", i), ext); + + // Mark corresponding *.idx for deletion on exit as well just in case an index is created for the temp file: + new File(fl.getAbsolutePath() + ".idx").deleteOnExit(); + tmpFiles.add(fl); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java index 8d0874ea1..aca6cf984 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/EngineFeaturesIntegrationTest.java @@ -25,13 +25,32 @@ package org.broadinstitute.sting.gatk; +import net.sf.samtools.SAMFileReader; +import net.sf.samtools.SAMRecord; +import org.broad.tribble.readers.AsciiLineReader; import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.commandline.Output; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.ReadFilters; +import org.broadinstitute.sting.gatk.walkers.ReadWalker; import org.broadinstitute.sting.gatk.walkers.qc.ErrorThrowing; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; +import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; +import org.broadinstitute.sting.utils.variant.GATKVCFUtils; +import org.broadinstitute.variant.vcf.VCFCodec; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.io.File; +import java.io.FileInputStream; +import 
java.io.PrintStream; import java.util.Arrays; /** @@ -123,7 +142,159 @@ public class EngineFeaturesIntegrationTest extends WalkerTest { final String root = "-T ErrorThrowing -R " + exampleFASTA; final String args = root + cfg.args + " -E " + cfg.expectedException.getSimpleName(); WalkerTestSpec spec = new WalkerTestSpec(args, 0, cfg.expectedException); + executeTest(cfg.toString(), spec); } } + + // -------------------------------------------------------------------------------- + // + // Test that read filters are being applied in the order we expect + // + // -------------------------------------------------------------------------------- + + @ReadFilters({MappingQualityUnavailableFilter.class}) + public static class DummyReadWalkerWithMapqUnavailableFilter extends ReadWalker { + @Output + PrintStream out; + + @Override + public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { + return 1; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return value + sum; + } + + @Override + public void onTraversalDone(Integer result) { + out.println(result); + } + } + + @Test(enabled = true) + public void testUserReadFilterAppliedBeforeWalker() { + WalkerTestSpec spec = new WalkerTestSpec("-R " + b37KGReference + " -I " + privateTestDir + "allMAPQ255.bam" + + " -T DummyReadWalkerWithMapqUnavailableFilter -o %s -L MT -rf ReassignMappingQuality", + 1, Arrays.asList("ecf27a776cdfc771defab1c5d19de9ab")); + executeTest("testUserReadFilterAppliedBeforeWalker", spec); + } + + @Test + public void testNegativeCompress() { + testBadCompressArgument(-1); + } + + @Test + public void testTooBigCompress() { + testBadCompressArgument(100); + } + + private void testBadCompressArgument(final int compress) { + WalkerTestSpec spec = new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I private/testdata/NA12878.1_10mb_2_10mb.bam -o %s -compress " + compress, + 
1, UserException.class); + executeTest("badCompress " + compress, spec); + } + + // -------------------------------------------------------------------------------- + // + // Test that the VCF version key is what we expect + // + // -------------------------------------------------------------------------------- + @Test(enabled = true) + public void testGATKVersionInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference + + " -V " + privateTestDir + "NA12878.WGS.b37.chr20.firstMB.vcf" + + " -o %s -L 20:61098", + 1, Arrays.asList("")); + spec.disableShadowBCF(); + final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + final VCFHeaderLine versionLine = header.getMetaDataLine(GATKVCFUtils.GATK_COMMAND_LINE_KEY); + Assert.assertNotNull(versionLine); + Assert.assertTrue(versionLine.toString().contains("SelectVariants")); + } + + @Test(enabled = true) + public void testMultipleGATKVersionsInVCF() throws Exception { + WalkerTestSpec spec = new WalkerTestSpec("-T SelectVariants -R " + b37KGReference + + " -V " + privateTestDir + "gatkCommandLineInHeader.vcf" + + " -o %s", + 1, Arrays.asList("")); + spec.disableShadowBCF(); + final File vcf = executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0); + final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); + + boolean foundHC = false; + boolean foundSV = false; + for ( final VCFHeaderLine line : header.getMetaDataInInputOrder() ) { + if ( line.getKey().equals(GATKVCFUtils.GATK_COMMAND_LINE_KEY) ) { + if ( line.toString().contains("HaplotypeCaller") ) { + Assert.assertFalse(foundHC); + foundHC = true; + } + if ( line.toString().contains("SelectVariants") ) { + Assert.assertFalse(foundSV); + foundSV = true; + } + } + } + + Assert.assertTrue(foundHC, "Didn't find HaplotypeCaller 
command line header field"); + Assert.assertTrue(foundSV, "Didn't find SelectVariants command line header field"); + } + + // -------------------------------------------------------------------------------- + // + // Test that defaultBaseQualities actually works + // + // -------------------------------------------------------------------------------- + + public WalkerTestSpec testDefaultBaseQualities(final Integer value, final String md5) { + return new WalkerTestSpec("-T PrintReads -R " + b37KGReference + " -I " + privateTestDir + "/baseQualitiesToFix.bam -o %s" + + (value != null ? " --defaultBaseQualities " + value : ""), + 1, Arrays.asList(md5)); + } + + @Test() + public void testDefaultBaseQualities20() { + executeTest("testDefaultBaseQualities20", testDefaultBaseQualities(20, "7d254a9d0ec59c66ee3e137f56f4c78f")); + } + + @Test() + public void testDefaultBaseQualities30() { + executeTest("testDefaultBaseQualities30", testDefaultBaseQualities(30, "0f50def6cbbbd8ccd4739e2b3998e503")); + } + + @Test(expectedExceptions = Exception.class) + public void testDefaultBaseQualitiesNoneProvided() { + executeTest("testDefaultBaseQualitiesNoneProvided", testDefaultBaseQualities(null, "")); + } + + @Test + public void testGATKEngineConsolidatesCigars() { + final WalkerTestSpec spec = new WalkerTestSpec(" -T PrintReads" + + " -R " + b37KGReference + + " -I " + privateTestDir + "zero_length_cigar_elements.bam" + + " -o %s", + 1, Arrays.asList("")); // No MD5s; we only want to check the cigar + + final File outputBam = executeTest("testGATKEngineConsolidatesCigars", spec).first.get(0); + final SAMFileReader reader = new SAMFileReader(outputBam); + reader.setValidationStringency(SAMFileReader.ValidationStringency.SILENT); + reader.setSAMRecordFactory(new GATKSamRecordFactory()); + + final SAMRecord read = reader.iterator().next(); + reader.close(); + + // Original cigar was 0M3M0M8M. 
Check that it's been consolidated after running through the GATK engine: + Assert.assertEquals(read.getCigarString(), "11M", "Cigar 0M3M0M8M not consolidated correctly by the engine"); + } } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java new file mode 100644 index 000000000..56725147e --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/ReadMetricsUnitTest.java @@ -0,0 +1,371 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk; + +import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.commandline.Tags; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.datasources.providers.LocusShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; +import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; +import org.broadinstitute.sting.gatk.executive.WindowMaker; +import org.broadinstitute.sting.gatk.filters.ReadFilter; +import org.broadinstitute.sting.gatk.iterators.ReadTransformer; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.gatk.traversals.*; +import org.broadinstitute.sting.gatk.walkers.*; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.GenomeLocSortedSet; +import org.broadinstitute.sting.utils.SampleUtils; +import org.broadinstitute.sting.utils.activeregion.ActiveRegion; +import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.sting.utils.sam.*; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.IOException; +import java.util.*; + +public class ReadMetricsUnitTest extends BaseTest { + + @Test + public void testReadsSeenDoNotOverflowInt() { + + final ReadMetrics metrics = new ReadMetrics(); + + final 
long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L; + + for ( long i = 0L; i < moreThanMaxInt; i++ ) { + metrics.incrementNumReadsSeen(); + } + + Assert.assertEquals(metrics.getNumReadsSeen(), moreThanMaxInt); + Assert.assertTrue(metrics.getNumReadsSeen() > (long) Integer.MAX_VALUE); + + logger.warn(String.format("%d %d %d", Integer.MAX_VALUE, moreThanMaxInt, Long.MAX_VALUE)); + } + + + // Test the accuracy of the read metrics + + private IndexedFastaSequenceFile reference; + private SAMSequenceDictionary dictionary; + private SAMFileHeader header; + private GATKSAMReadGroupRecord readGroup; + private GenomeLocParser genomeLocParser; + private File testBAM; + + private static final int numReadsPerContig = 250000; + private static final List contigs = Arrays.asList("1", "2", "3"); + + @BeforeClass + private void init() throws IOException { + reference = new CachingIndexedFastaSequenceFile(new File(b37KGReference)); + dictionary = reference.getSequenceDictionary(); + genomeLocParser = new GenomeLocParser(dictionary); + header = ArtificialSAMUtils.createDefaultReadGroup(new SAMFileHeader(), "test", "test"); + header.setSequenceDictionary(dictionary); + header.setSortOrder(SAMFileHeader.SortOrder.coordinate); + readGroup = new GATKSAMReadGroupRecord(header.getReadGroup("test")); + + final List reads = new ArrayList<>(); + for ( final String contig : contigs ) { + for ( int i = 1; i <= numReadsPerContig; i++ ) { + reads.add(buildSAMRecord("read" + contig + "_" + i, contig, i)); + } + } + + createBAM(reads); + } + + private void createBAM(final List reads) throws IOException { + testBAM = File.createTempFile("TraverseActiveRegionsUnitTest", ".bam"); + testBAM.deleteOnExit(); + + SAMFileWriter out = new SAMFileWriterFactory().setCreateIndex(true).makeBAMWriter(reads.get(0).getHeader(), true, testBAM); + for (GATKSAMRecord read : reads ) { + out.addAlignment(read); + } + out.close(); + + new File(testBAM.getAbsolutePath().replace(".bam", ".bai")).deleteOnExit(); + new 
File(testBAM.getAbsolutePath() + ".bai").deleteOnExit(); + } + + // copied from LocusViewTemplate + protected GATKSAMRecord buildSAMRecord(final String readName, final String contig, final int alignmentStart) { + GATKSAMRecord record = new GATKSAMRecord(header); + + record.setReadName(readName); + record.setReferenceIndex(dictionary.getSequenceIndex(contig)); + record.setAlignmentStart(alignmentStart); + + record.setCigarString("1M"); + record.setReadString("A"); + record.setBaseQualityString("A"); + record.setReadGroup(readGroup); + + return record; + } + + @Test + public void testCountsFromReadTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + + final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); + final DummyReadWalker walker = new DummyReadWalker(); + traverseReadsNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { + final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList()); + traverseReadsNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void 
testCountsFromLocusTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); + + final TraverseLociNano traverseLociNano = new TraverseLociNano(1); + final DummyLocusWalker walker = new DummyLocusWalker(); + traverseLociNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new LocusShardBalancer()) ) { + final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples); + for ( WindowMaker.WindowMakerIterator window : windowMaker ) { + final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList()); + traverseLociNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + windowMaker.close(); + } + + //dataSource.close(); + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void testCountsFromActiveRegionTraversal() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new 
Tags()); + samFiles.add(readerID); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + new ArrayList(), + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + final Set samples = SampleUtils.getSAMFileSamples(dataSource.getHeader()); + + final List intervals = new ArrayList<>(contigs.size()); + for ( final String contig : contigs ) + intervals.add(genomeLocParser.createGenomeLoc(contig, 1, numReadsPerContig)); + + final TraverseActiveRegions traverseActiveRegions = new TraverseActiveRegions(); + final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(); + traverseActiveRegions.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new ActiveRegionShardBalancer()) ) { + final WindowMaker windowMaker = new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples); + for ( WindowMaker.WindowMakerIterator window : windowMaker ) { + final LocusShardDataProvider dataProvider = new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList()); + traverseActiveRegions.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + windowMaker.close(); + } + + Assert.assertEquals(engine.getCumulativeMetrics().getNumReadsSeen(), contigs.size() * numReadsPerContig); + Assert.assertEquals(engine.getCumulativeMetrics().getNumIterations(), contigs.size() * numReadsPerContig); + } + + @Test + public void testFilteredCounts() { + final GenomeAnalysisEngine engine = new GenomeAnalysisEngine(); + engine.setGenomeLocParser(genomeLocParser); + + final Collection samFiles = new ArrayList<>(); + final SAMReaderID readerID = new SAMReaderID(testBAM, new Tags()); + 
samFiles.add(readerID); + + final List filters = new ArrayList<>(); + filters.add(new EveryTenthReadFilter()); + + final SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser, + false, + SAMFileReader.ValidationStringency.STRICT, + null, + null, + new ValidationExclusion(), + filters, + new ArrayList(), + false, (byte)30, false, true); + + engine.setReadsDataSource(dataSource); + + final TraverseReadsNano traverseReadsNano = new TraverseReadsNano(1); + final DummyReadWalker walker = new DummyReadWalker(); + traverseReadsNano.initialize(engine, walker, null); + + for ( final Shard shard : dataSource.createShardIteratorOverAllReads(new ReadShardBalancer()) ) { + final ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard, engine.getGenomeLocParser(), dataSource.seek(shard), reference, new ArrayList()); + traverseReadsNano.traverse(walker, dataProvider, 0); + dataProvider.close(); + } + + Assert.assertEquals((long)engine.getCumulativeMetrics().getCountsByFilter().get(EveryTenthReadFilter.class.getSimpleName()), contigs.size() * numReadsPerContig / 10); + } + + class DummyLocusWalker extends LocusWalker { + @Override + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + class DummyReadWalker extends ReadWalker { + @Override + public Integer map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + class DummyActiveRegionWalker extends ActiveRegionWalker { + @Override + public ActivityProfileState isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { + return new 
ActivityProfileState(ref.getLocus(), 0.0); + } + + @Override + public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) { + return 0; + } + + @Override + public Integer reduceInit() { + return 0; + } + + @Override + public Integer reduce(Integer value, Integer sum) { + return 0; + } + } + + private final class EveryTenthReadFilter extends ReadFilter { + + private int myCounter = 0; + + @Override + public boolean filterOut(final SAMRecord record) { + if ( ++myCounter == 10 ) { + myCounter = 0; + return true; + } + + return false; + } + } +} \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java similarity index 98% rename from public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java rename to public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java index bf4d36d92..784bd727e 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReadBasedReferenceOrderedViewUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/IntervalReferenceOrderedViewUnitTest.java @@ -49,7 +49,7 @@ import java.util.*; /** * @author depristo */ -public class ReadBasedReferenceOrderedViewUnitTest extends BaseTest { +public class IntervalReferenceOrderedViewUnitTest extends BaseTest { private static int startingChr = 1; private static int endingChr = 2; private static int readCount = 100; @@ -285,7 +285,7 @@ public class ReadBasedReferenceOrderedViewUnitTest extends BaseTest { Collections.sort(intervals); final GenomeLoc span = span(intervals); - final ReadBasedReferenceOrderedView view = new ReadBasedReferenceOrderedView(genomeLocParser, span, names, iterators); + final IntervalReferenceOrderedView 
view = new IntervalReferenceOrderedView(genomeLocParser, span, names, iterators); if ( testStateless ) { // test each tracker is well formed, as each is created diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java index fad632cfd..1d39f43c6 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/providers/ReferenceOrderedViewUnitTest.java @@ -97,7 +97,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.emptyList()); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",10)); Assert.assertEquals(tracker.getValues(Feature.class).size(), 0, "The tracker should not have produced any data"); } @@ -115,7 +115,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Collections.singletonList(dataSource)); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20)); TableFeature datum = tracker.getFirstValue(new RodBinding(TableFeature.class, "tableTest")); Assert.assertEquals(datum.get("COL1"),"C","datum parameter 
for COL1 is incorrect"); @@ -141,7 +141,7 @@ public class ReferenceOrderedViewUnitTest extends BaseTest { LocusShardDataProvider provider = new LocusShardDataProvider(shard, null, genomeLocParser, shard.getGenomeLocs().get(0), null, seq, Arrays.asList(dataSource1,dataSource2)); ReferenceOrderedView view = new ManagingReferenceOrderedView( provider ); - RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20), null); + RefMetaDataTracker tracker = view.getReferenceOrderedDataAtLocus(genomeLocParser.createGenomeLoc("chrM",20)); TableFeature datum1 = tracker.getFirstValue(new RodBinding(TableFeature.class, "tableTest1")); Assert.assertEquals(datum1.get("COL1"),"C","datum1 parameter for COL1 is incorrect"); diff --git a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java index 00389be97..25c71d570 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java +++ b/public/java/test/org/broadinstitute/sting/gatk/datasources/reads/DownsamplerBenchmark.java @@ -26,7 +26,9 @@ package org.broadinstitute.sting.gatk.datasources.reads; import com.google.caliper.Param; +import org.broadinstitute.sting.gatk.WalkerManager; import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.walkers.LocusWalker; import org.broadinstitute.sting.gatk.walkers.qc.CountLoci; /** @@ -86,7 +88,7 @@ public class DownsamplerBenchmark extends ReadProcessingBenchmark { }, PER_SAMPLE { @Override - DownsamplingMethod create() { return DownsamplingMethod.getDefaultDownsamplingMethod(new CountLoci()); } + DownsamplingMethod create() { return WalkerManager.getDownsamplingMethod(LocusWalker.class); } }; abstract DownsamplingMethod create(); } diff --git 
a/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java new file mode 100644 index 000000000..85f9169da --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/DownsamplingIntegrationTest.java @@ -0,0 +1,44 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.downsampling; + +import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.testng.annotations.Test; + +public class DownsamplingIntegrationTest extends WalkerTest { + + @Test + public void testDetectLowDcovValueWithLocusTraversal() { + final WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( + "-T CountLoci -R " + publicTestDir + "exampleFASTA.fasta -I " + publicTestDir + "exampleBAM.bam -o %s " + + "-dcov " + (DownsamplingMethod.MINIMUM_SAFE_COVERAGE_TARGET_FOR_LOCUS_BASED_TRAVERSALS - 1), + 1, + UserException.class + ); + executeTest("testDetectLowDcovValueWithLocusTraversal", spec); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java index 6f18d794f..8f0eee069 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/FractionalDownsamplerUnitTest.java @@ -30,6 +30,7 @@ import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.testng.Assert; @@ -152,7 +153,39 @@ public class FractionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.totalReads - downsampledReads.size()); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new 
FractionalDownsampler(0.0); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, 1, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5)); + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 5, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 10, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 10, "wrong number of items returned by the downsampler"); + + for ( GATKSAMRecord readReturned : readsReturned ) { + Assert.assertTrue(readReturned.isReducedRead(), "non-reduced read survived the downsampling process, but shouldn't have"); + } + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java index 972e51dcd..8cf0fd2a1 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/LevelingDownsamplerUnitTest.java @@ -25,16 +25,17 @@ package org.broadinstitute.sting.gatk.downsampling; +import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.utils.locusiterator.AlignmentStateMachine; +import 
org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.Test; import org.testng.annotations.DataProvider; import org.testng.Assert; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; +import java.util.*; public class LevelingDownsamplerUnitTest extends BaseTest { @@ -158,9 +159,46 @@ public class LevelingDownsamplerUnitTest extends BaseTest { Assert.assertEquals(numItemsReportedDiscarded, numItemsActuallyDiscarded); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); Assert.assertTrue(totalRemainingItems <= Math.max(test.targetSize, test.numStacks)); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final Downsampler> downsampler = new LevelingDownsampler, AlignmentStateMachine>(1); + + final Collection> groups = new LinkedList>(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int alignmentStart : Arrays.asList(1, 2, 3) ) { + final LinkedList group = new LinkedList(); + for ( int i = 1; i <= 10; i++ ) { + group.add(new AlignmentStateMachine(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, alignmentStart, 5, baseCounts))); + } + groups.add(group); + } + + downsampler.submit(groups); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 30, "downsampler size() reports wrong number of items"); + + final Collection> groupsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(groupsReturned.size(), 3, "wrong number 
of groups returned by the downsampler"); + + for ( LinkedList group : groupsReturned ) { + Assert.assertEquals(group.size(), 10, "group has wrong size after downsampling"); + + for ( AlignmentStateMachine state : group ) { + Assert.assertTrue(state.isReducedRead()); + } + } + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java index 022eb02d2..a50201efd 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/ReservoirDownsamplerUnitTest.java @@ -30,6 +30,7 @@ import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import org.testng.Assert; @@ -125,7 +126,49 @@ public class ReservoirDownsamplerUnitTest extends BaseTest { Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), test.expectedNumDiscardedItems); Assert.assertEquals(test.totalReads - downsampledReads.size(), test.expectedNumDiscardedItems); - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new ReservoirDownsampler(1); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, 1, 5, baseCounts)); + } + for ( int 
i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, 1, 5)); + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 4, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 11, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 11, "wrong number of items returned by the downsampler"); + + int numReducedReadsReturned = 0; + int numNormalReadsReturned = 0; + for ( GATKSAMRecord readReturned : readsReturned ) { + if ( readReturned.isReducedRead() ) { + numReducedReadsReturned++; + } + else { + numNormalReadsReturned++; + } + } + + Assert.assertEquals(numReducedReadsReturned, 10, "wrong number of reduced reads returned by the downsampler"); + Assert.assertEquals(numNormalReadsReturned, 1, "wrong number of non-reduced reads returned by the downsampler"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java index c6b0dea29..bec0030d0 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/downsampling/SimplePositionalDownsamplerUnitTest.java @@ -177,7 +177,7 @@ public class SimplePositionalDownsamplerUnitTest extends BaseTest { Assert.assertEquals(numReadsActuallyEliminated, numReadsReportedEliminated); } - downsampler.reset(); + downsampler.resetStats(); Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 0); } @@ -328,4 +328,48 @@ public class SimplePositionalDownsamplerUnitTest extends BaseTest { 
Assert.assertEquals(downsampledReads.size(), 10); } + + @Test + public void testDoNotDiscardReducedReads() { + GenomeAnalysisEngine.resetRandomGenerator(); + final ReadsDownsampler downsampler = new SimplePositionalDownsampler(1); + + final Collection reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int alignmentStart : Arrays.asList(1, 2, 3) ) { + for ( int i = 1; i <= 10; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, alignmentStart, 5, baseCounts)); + } + for ( int i = 1; i <= 5; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, alignmentStart, 5)); + } + } + + downsampler.submit(reads); + downsampler.signalEndOfInput(); + + Assert.assertEquals(downsampler.getNumberOfDiscardedItems(), 12, "wrong number of items discarded by the downsampler"); + Assert.assertTrue(downsampler.hasFinalizedItems(), "downsampler should have finalized items but doesn't"); + Assert.assertEquals(downsampler.size(), 33, "downsampler size() reports wrong number of items"); + + final Collection readsReturned = downsampler.consumeFinalizedItems(); + + Assert.assertEquals(readsReturned.size(), 33, "wrong number of items returned by the downsampler"); + + int numReducedReadsReturned = 0; + int numNormalReadsReturned = 0; + for ( GATKSAMRecord readReturned : readsReturned ) { + if ( readReturned.isReducedRead() ) { + numReducedReadsReturned++; + } + else { + numNormalReadsReturned++; + } + } + + Assert.assertEquals(numReducedReadsReturned, 30, "wrong number of reduced reads returned by the downsampler"); + Assert.assertEquals(numNormalReadsReturned, 3, "wrong number of non-reduced reads returned by the downsampler"); + } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java new file mode 100644 index 000000000..d169bf7e9 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/AllowNCigarMalformedReadFilterUnitTest.java @@ -0,0 +1,77 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + + +import net.sf.samtools.SAMRecord; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Collections; + + +/** + * Tests for the {@link MalformedReadFilter} when the unsafe flag + * {@link ValidationExclusion.TYPE#ALLOW_N_CIGAR_READS} is set. 
+ * + * @author Valentin Ruano-Rubio + * @since 6/6/13 + */ +public class AllowNCigarMalformedReadFilterUnitTest extends MalformedReadFilterUnitTest { + + + @Override + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALLOW_N_CIGAR_READS)); + } + + + @Test(enabled = true, + dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.IGNORE) + public void testCigarNOperatorFilterIgnore(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertFalse(filter.filterOut(nContainingCigarRead), + "filters out N containing Cigar when it should ignore the fact"); + } + + @Test(enabled = false) + @Override + public void testCigarNOperatorFilterException(final String cigarString) { + // Nothing to do here. + // Just deactivates the parents test case. 
+ } + + + + + + + +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java index 981d54d54..0d8515dde 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/MalformedReadFilterUnitTest.java @@ -25,11 +25,25 @@ package org.broadinstitute.sting.gatk.filters; -import org.broadinstitute.sting.utils.exceptions.UserException; + +import net.sf.samtools.Cigar; +import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.SAMRecord; +import net.sf.samtools.TextCigarCodec; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import org.broadinstitute.sting.utils.exceptions.UserException; +import org.broadinstitute.sting.utils.exceptions.UserException.UnsupportedCigarOperatorException; + +import java.lang.annotation.*; +import java.lang.reflect.Method; +import java.util.*; /** @@ -38,14 +52,14 @@ import org.testng.annotations.Test; * @author Eric Banks * @since 3/14/13 */ -public class MalformedReadFilterUnitTest { +public class MalformedReadFilterUnitTest extends ReadFilterTest { ////////////////////////////////////// // Test the checkSeqStored() method // ////////////////////////////////////// @Test(enabled = true) - public void testcheckSeqStored () { + public void testCheckSeqStored () { final GATKSAMRecord goodRead = ArtificialSAMUtils.createArtificialRead(new byte[]{(byte)'A'}, new byte[]{(byte)'A'}, "1M"); final GATKSAMRecord badRead = 
ArtificialSAMUtils.createArtificialRead(new byte[]{}, new byte[]{}, "1M"); @@ -59,4 +73,174 @@ public class MalformedReadFilterUnitTest { Assert.assertTrue(false, "We should have exceptioned out in the previous line"); } catch (UserException e) { } } + + @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.FILTER) + public void testCigarNOperatorFilterTruePositive(String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(true); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertTrue(filter.filterOut(nContainingCigarRead), + " Did not filtered out a N containing CIGAR read"); + } + + @Test(enabled = true, dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT) + public void testCigarNOperatorFilterTrueNegative(String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(true); + final SAMRecord nonNContainingCigarRead = buildSAMRecord(cigarString); + Assert.assertFalse(filter.filterOut(nonNContainingCigarRead), + " Filtered out a non-N containing CIGAR read"); + } + + @Test(enabled = true, + expectedExceptions = UnsupportedCigarOperatorException.class, + dataProvider= "UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.EXCEPTION) + public void testCigarNOperatorFilterException(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nContainingCigarRead = buildSAMRecord(cigarString); + + filter.filterOut(nContainingCigarRead); + } + + @Test(enabled = true, dataProvider="UnsupportedCigarOperatorDataProvider") + @CigarOperatorTest(CigarOperatorTest.Outcome.ACCEPT) + public void testCigarNOperatorFilterControl(final String cigarString) { + + final MalformedReadFilter filter = buildMalformedReadFilter(false); + final SAMRecord nonNContainingCigarRead = 
buildSAMRecord(cigarString); + + Assert.assertFalse(filter.filterOut(nonNContainingCigarRead)); + } + + protected SAMRecord buildSAMRecord(final String cigarString) { + final Cigar nContainingCigar = TextCigarCodec.getSingleton().decode(cigarString); + return this.createRead(nContainingCigar, 1, 0, 10); + } + + protected MalformedReadFilter buildMalformedReadFilter(final boolean filterRNO) { + return buildMalformedReadFiter(filterRNO,new ValidationExclusion.TYPE[] {}); + } + + protected MalformedReadFilter buildMalformedReadFiter(boolean filterRNO, final ValidationExclusion.TYPE... excl) { + final ValidationExclusion ve = new ValidationExclusion(Arrays.asList(excl)); + + final MalformedReadFilter filter = new MalformedReadFilter(); + + final SAMFileHeader h = getHeader(); + final SAMDataSource ds = getDataSource(); + + final GenomeAnalysisEngine gae = new GenomeAnalysisEngine() { + @Override + public SAMFileHeader getSAMFileHeader() { + return h; + } + + @Override + public SAMDataSource getReadsDataSource() { + return ds; + } + }; + filter.initialize(gae); + filter.filterReadsWithNCigar = filterRNO; + return filter; + } + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.METHOD) + @Inherited + protected @interface CigarOperatorTest { + + enum Outcome { + ANY,ACCEPT,FILTER,EXCEPTION,IGNORE; + + public boolean appliesTo (String cigar) { + boolean hasN = cigar.indexOf('N') != -1; + switch (this) { + case ANY: return true; + case ACCEPT: return !hasN; + case IGNORE: return hasN; + case FILTER: + case EXCEPTION: + default: + return hasN; + + } + } + } + + Outcome value() default Outcome.ANY; + } + + /** + * Cigar test data for unsupported operator test. + * Each element of this array corresponds to a test case. 
In turn the first element of the test case array is the + * Cigar string for that test case and the second indicates whether it should be filtered due to the presence of a + * unsupported operator + */ + private static final String[] TEST_CIGARS = { + "101M10D20I10M", + "6M14N5M", + "1N", + "101M", + "110N", + "2N4M", + "4M2N", + "3M1I1M", + "1M2I2M", + "1M10N1I1M", + "1M1I1D", + "11N12M1I34M12N" + }; + + @DataProvider(name= "UnsupportedCigarOperatorDataProvider") + public Iterator unsupportedOperatorDataProvider(final Method testMethod) { + final CigarOperatorTest a = resolveCigarOperatorTestAnnotation(testMethod); + final List result = new LinkedList(); + for (final String cigarString : TEST_CIGARS) { + if (a == null || a.value().appliesTo(cigarString)) { + result.add(new Object[] { cigarString }); + } + } + return result.iterator(); + } + + /** + * Gets the most specific {@link CigarOperatorTest} annotation for the + * signature of the test method provided. + *

+ * This in-house implementation is required due to the fact that method + * annotations do not have inheritance. + * + * @param m targeted test method. + * @return null if there is no {@link CigarOperatorTest} + * annotation in this or overridden methods. + */ + private CigarOperatorTest resolveCigarOperatorTestAnnotation(final Method m) { + CigarOperatorTest res = m.getAnnotation(CigarOperatorTest.class); + if (res != null) { + return res; + } + Class c = this.getClass(); + Class p = c.getSuperclass(); + while (p != null && p != Object.class) { + try { + final Method met = p.getDeclaredMethod(m.getName(), + m.getParameterTypes()); + res = met.getAnnotation(CigarOperatorTest.class); + if (res != null) { + break; + } + } catch (NoSuchMethodException e) { + // Its ok; nothing to do here, just keep looking. + } + c = p; + p = c.getSuperclass(); + } + return res; + } + } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java new file mode 100644 index 000000000..5b6f67c42 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadFilterTest.java @@ -0,0 +1,370 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; +import org.broadinstitute.sting.gatk.datasources.reads.SAMDataSource; +import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID; +import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod; +import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; + +import java.util.*; + +/** + * Class ReadBaseTest + *

+ * This is the base test class for read filter test classes. All read + * filter test cases should extend from this + * class; it sets ups a header mock up to test read filtering. + * + * Feel free to override non-final method to modify the behavior + * (i.e. change how read group id are formatted, or complete a header). + * + *

+ * You can statically determine the number of read-group involved + * in the test by calling {@link #ReadFilterTest(int)} in you constructor. + *

+ * + * Notice that the same header object is shared by all test and + * it is initialized by Junit (calling {@link #beforeClass()}. + * + * @author Valentin Ruano Rubio + * @date May 23, 2013 + */ +public class ReadFilterTest extends BaseTest { + + private static final int DEFAULT_READ_GROUP_COUNT = 5; + private static final int DEFAULT_READER_COUNT = 1; + private static final String DEFAULT_READ_GROUP_PREFIX = "ReadGroup"; + private static final String DEFAULT_PLATFORM_UNIT_PREFIX = "Lane"; + private static final String DEFAULT_SAMPLE_NAME_PREFIX = "Sample"; + private static final String DEFAULT_PLATFORM_PREFIX = "Platform"; + private static final int DEFAULT_CHROMOSOME_COUNT = 1; + private static final int DEFAULT_CHROMOSOME_START_INDEX = 1; + private static final int DEFAULT_CHROMOSOME_SIZE = 1000; + private static final String DEFAULT_SAM_FILE_FORMAT = "readfile-%3d.bam"; + + private final int groupCount; + + private SAMFileHeader header; + + private SAMDataSource dataSource; + + /** + * Constructs a new read-filter test providing the number of read + * groups in the file. + * + * @param groupCount number of read-group in the fictional SAM file, + * must be equal or greater than 1. + */ + protected ReadFilterTest(final int groupCount) { + if (groupCount < 1) { + throw new IllegalArgumentException( + "the read group count must at least be 1"); + } + this.groupCount = groupCount; + } + + + /** + * Gets the data source. + * + * @throws IllegalStateException if the data source was not initialized + * invoking {@link #beforeClass()} + * @return never null + */ + protected final SAMDataSource getDataSource() { + checkDataSourceExists(); + return dataSource; + } + + /** + * Returns the mock-up SAM file header for testing. 
+ * + * @throws IllegalStateException if the header was not initialized + * invoking {@link #beforeClass()} + * @return never null + */ + protected final SAMFileHeader getHeader() { + checkHeaderExists(); + return header; + } + + /** + * Construct a read filter test with the default number of groups + * ({@link #DEFAULT_READ_GROUP_COUNT}. + */ + public ReadFilterTest() { + this(DEFAULT_READ_GROUP_COUNT); + } + + /** + * Return the number of read groups involved in the test + * @return 1 or greater. + */ + protected final int getReadGroupCount() { + return groupCount; + } + + /** + * Composes the Id for the read group given its index. + * + * This methods must return a unique distinct ID for each possible index and + * it must be the same value each time it is invoked. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null and must be unique to each possible + * read group index. + */ + protected String composeReadGroupId(final int index) { + checkReadGroupIndex(index); + return DEFAULT_READ_GROUP_PREFIX + index; + } + + /** + * Composes the Platform name for the read group given its index. + * + * This method must always return the same value give an index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected String composePlatformName(final int index) { + checkReadGroupIndex(index); + return DEFAULT_PLATFORM_PREFIX + (((index-1)%2)+1); + } + + + /** + * Composes the Platform unit name for the read group given its index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected String composePlatformUnitName(final int index) { + checkReadGroupIndex(index); + return DEFAULT_PLATFORM_UNIT_PREFIX + (((index-1)%3)+1); + } + + + + /** + * Checks the correctness of a given read group index. 
+ * + * A correct index is any value in the range [1,{@link #getReadGroupCount()}]. + * + * @param index the target index. + * @throws IllegalArgumentException if the input index is not correct. + */ + protected final void checkReadGroupIndex(final int index) { + checkIndex(index,groupCount,"read group"); + } + + + private void checkIndex(final int index, final int max, CharSequence name) { + if (index < 1 || index > max) { + throw new IllegalArgumentException( + name + " index (" + + index + + ") is out of bounds [1," + max + "]"); + } + } + + + /** + * Checks whether the header was initialized. + * + * @throws IllegalStateException if the header was not yet initialized. + */ + protected final void checkHeaderExists() { + if (header == null) { + throw new IllegalArgumentException( + "header has not been initialized;" + + " beforeClass() was not invoked"); + } + } + + /** + * Checks whether the data source was initialized. + * + * @throws IllegalStateException if the data source was not yet initialized. + */ + protected final void checkDataSourceExists() { + if (header == null) { + throw new IllegalArgumentException( + "data source has not been initialized;" + + " beforeClass() was not invoked"); + } + } + + /** + * Returns the ID for a read group given its index. + * + * @param index the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null and must be unique to each + * possible read group index. + */ + protected final String getReadGroupId(final int index) { + checkReadGroupIndex(index); + return getHeader().getReadGroups().get(index - 1).getReadGroupId(); + } + + /** + * Returns the platform name for a read group given its index. + * + * @param group the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. 
+ */ + protected final String getPlatformName(final int group) { + checkReadGroupIndex(group); + return getHeader().getReadGroups().get(group - 1).getPlatform(); + } + + /** + * Returns the platform unit for a read group given its index. + * + * @param group the index of the targeted read group in the range + * [1,{@link #getReadGroupCount()}] + * @return never null. + */ + protected final String getPlatformUnit(final int group) { + checkReadGroupIndex(group); + return getHeader().getReadGroups().get(group - 1).getPlatformUnit(); + } + + + /** + * Composes the mock up SAM file header. + * + * It must return an equivalent (equal) value each time it is invoked. + * + * @return never null. + */ + protected SAMFileHeader composeHeader() { + + return ArtificialSAMUtils.createArtificialSamHeader( + DEFAULT_CHROMOSOME_COUNT, DEFAULT_CHROMOSOME_START_INDEX, + DEFAULT_CHROMOSOME_SIZE); + } + + @BeforeClass + public void beforeClass() { + + header = composeHeader(); + dataSource = composeDataSource(); + final List readGroupIDs = new ArrayList(); + final List sampleNames = new ArrayList(); + + for (int i = 1; i <= getReadGroupCount(); i++) { + final String readGroupId = composeReadGroupId(i); + readGroupIDs.add(readGroupId); + sampleNames.add(readGroupId); + } + + ArtificialSAMUtils.createEnumeratedReadGroups( + header, readGroupIDs, sampleNames); + + for (int i = 1; i <= getReadGroupCount(); i++) { + final String readGroupId = readGroupIDs.get(i-1); + final SAMReadGroupRecord groupRecord = header.getReadGroup(readGroupId); + groupRecord.setAttribute("PL", composePlatformName(i)); + groupRecord.setAttribute("PU", composePlatformUnitName(i)); + } + + } + + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(); + } + + protected SAMDataSource composeDataSource() { + checkHeaderExists(); + final Set readerIDs = new HashSet<>(1); + final ThreadAllocation ta = new ThreadAllocation(); + final Integer numFileHandles = 1; // I believe that 
any value would do but need to confirm. + final boolean useOriginalBaseQualities = true; + final SAMFileReader.ValidationStringency strictness = SAMFileReader.ValidationStringency.LENIENT; + final Integer readBufferSize = 1; // not relevant. + final DownsamplingMethod downsamplingMethod = DownsamplingMethod.NONE; + final ValidationExclusion exclusionList = composeValidationExclusion(); + final Collection supplementalFilters = Collections.EMPTY_SET; + final boolean includeReadsWithDeletionAtLoci = true; + + final GenomeLocParser glp = new GenomeLocParser(header.getSequenceDictionary()); + final SAMDataSource res = new SAMDataSource( + readerIDs, + ta, + numFileHandles, + glp, + useOriginalBaseQualities, + strictness, + readBufferSize, + downsamplingMethod, + exclusionList, + supplementalFilters, + includeReadsWithDeletionAtLoci); + + return res; + } + + @AfterClass + public void afterClass() { + header = null; + dataSource = null; + } + + /** + * Creates a read record. + * + * @param cigar the new record CIGAR. 
+ * @param group the new record group index that must be in the range \ + * [1,{@link #getReadGroupCount()}] + * @param reference the reference sequence index (0-based) + * @param start the start position of the read alignment in the reference + * (1-based) + * @return never null + */ + protected SAMRecord createRead(final Cigar cigar, final int group, final int reference, final int start) { + final SAMRecord record = ArtificialSAMUtils.createArtificialRead(cigar); + record.setHeader(getHeader()); + record.setAlignmentStart(start); + record.setReferenceIndex(reference); + record.setAttribute(SAMTag.RG.toString(), getReadGroupId(group)); + return record; + + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java index 1370aeb50..1be31b293 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/ReadGroupBlackListFilterUnitTest.java @@ -26,13 +26,10 @@ package org.broadinstitute.sting.gatk.filters; import org.testng.Assert; -import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; -import net.sf.samtools.SAMFileHeader; import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMReadGroupRecord; @@ -40,34 +37,7 @@ import java.util.List; import java.util.ArrayList; import java.util.Collections; -public class ReadGroupBlackListFilterUnitTest extends BaseTest { - private static final int READ_GROUP_COUNT = 5; - private static final String READ_GROUP_PREFIX = "ReadGroup"; - private static final String SAMPLE_NAME_PREFIX = "Sample"; - private static final String PLATFORM_PREFIX = "Platform"; - private static final String 
PLATFORM_UNIT_PREFIX = "Lane"; - private static SAMFileHeader header; - - @BeforeClass - public void beforeClass() { - header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000); - - List readGroupIDs = new ArrayList(); - List sampleNames = new ArrayList(); - - for (int i = 1; i <= READ_GROUP_COUNT; i++) { - readGroupIDs.add(READ_GROUP_PREFIX + i); - sampleNames.add(SAMPLE_NAME_PREFIX + i); - } - - ArtificialSAMUtils.createEnumeratedReadGroups(header, readGroupIDs, sampleNames); - - for (int i = 1; i <= READ_GROUP_COUNT; i++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + i); - groupRecord.setAttribute("PL", PLATFORM_PREFIX + (((i-1)%2)+1)); - groupRecord.setAttribute("PU", PLATFORM_UNIT_PREFIX + (((i-1)%3)+1)); - } - } +public class ReadGroupBlackListFilterUnitTest extends ReadFilterTest { @Test(expectedExceptions=ReviewedStingException.class) public void testBadFilter() { @@ -88,14 +58,14 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { @Test public void testFilterReadGroup() { - SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, 1, 20); - filteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "1"); + SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20); + filteredRecord.setAttribute("RG", getReadGroupId(1)); - SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readDos", 0, 2, 20); - unfilteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "2"); + SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20); + unfilteredRecord.setAttribute("RG", getReadGroupId(2)); List filterList = new ArrayList(); - filterList.add("RG:" + READ_GROUP_PREFIX + "1"); + filterList.add("RG:" + getReadGroupId(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); Assert.assertTrue(filter.filterOut(filteredRecord)); @@ -104,14 +74,14 @@ public class 
ReadGroupBlackListFilterUnitTest extends BaseTest { @Test public void testFilterPlatformUnit() { - SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, 1, 20); - filteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "1"); + SAMRecord filteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, 1, 20); + filteredRecord.setAttribute("RG", getReadGroupId(1)); - SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(header, "readDos", 0, 2, 20); - unfilteredRecord.setAttribute("RG", READ_GROUP_PREFIX + "2"); + SAMRecord unfilteredRecord = ArtificialSAMUtils.createArtificialRead(getHeader(), "readDos", 0, 2, 20); + unfilteredRecord.setAttribute("RG", getReadGroupId(2)); List filterList = new ArrayList(); - filterList.add("PU:" + PLATFORM_UNIT_PREFIX + "1"); + filterList.add("PU:" + getPlatformUnit(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); Assert.assertTrue(filter.filterOut(filteredRecord)); @@ -123,18 +93,18 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } } List filterList = new ArrayList(); - filterList.add("RG:" + READ_GROUP_PREFIX + "1"); - filterList.add("RG:" + READ_GROUP_PREFIX + "3"); + filterList.add("RG:" + getReadGroupId(1)); + filterList.add("RG:" + 
getReadGroupId(3)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); int filtered = 0; @@ -153,7 +123,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } @@ -163,17 +133,17 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } } List filterList = new ArrayList(); - filterList.add("PU:" + PLATFORM_UNIT_PREFIX + "1"); + filterList.add("PU:" + getPlatformUnit(1)); ReadGroupBlackListFilter filter = new ReadGroupBlackListFilter(filterList); int filtered = 0; @@ -202,10 +172,10 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord 
record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } @@ -231,7 +201,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } @@ -241,10 +211,10 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { int recordsPerGroup = 3; List records = new ArrayList(); int alignmentStart = 0; - for (int x = 1; x <= READ_GROUP_COUNT; x++) { - SAMReadGroupRecord groupRecord = header.getReadGroup(READ_GROUP_PREFIX + x); + for (int x = 1; x <= getReadGroupCount(); x++) { + SAMReadGroupRecord groupRecord = getHeader().getReadGroup(getReadGroupId(x)); for (int y = 1; y <= recordsPerGroup; y++) { - SAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "readUno", 0, ++alignmentStart, 20); + SAMRecord record = ArtificialSAMUtils.createArtificialRead(getHeader(), "readUno", 0, ++alignmentStart, 20); record.setAttribute("RG", groupRecord.getReadGroupId()); records.add(record); } @@ -270,7 +240,7 @@ public class ReadGroupBlackListFilterUnitTest extends BaseTest { } int filteredExpected = recordsPerGroup * 2; - int unfilteredExpected = recordsPerGroup * (READ_GROUP_COUNT - 2); + int unfilteredExpected = recordsPerGroup * (getReadGroupCount() - 2); Assert.assertEquals(filtered, filteredExpected, "Filtered"); Assert.assertEquals(unfiltered, unfilteredExpected, "Uniltered"); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java 
b/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java new file mode 100644 index 000000000..30e2f0f1b --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/filters/UnsafeMalformedReadFilterUnitTest.java @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.filters; + + +import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; + +import java.util.Collections; + + +/** + * Tests for the {@link MalformedReadFilter} when the unsafe flag + * {@link ValidationExclusion.TYPE#ALL} is set. 
+ * + * @author Valentin Ruano-Rubio + * @since 6/6/13 + */ +public class UnsafeMalformedReadFilterUnitTest extends AllowNCigarMalformedReadFilterUnitTest { + + + @Override + protected ValidationExclusion composeValidationExclusion() { + return new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)); + } + + +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java new file mode 100644 index 000000000..5d037bc4b --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/iterators/ReadFormattingIteratorUnitTest.java @@ -0,0 +1,50 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ + +package org.broadinstitute.sting.gatk.iterators; + +import net.sf.samtools.*; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Arrays; + + +public class ReadFormattingIteratorUnitTest extends BaseTest { + + @Test + public void testIteratorConsolidatesCigars() { + final Cigar unconsolidatedCigar = TextCigarCodec.getSingleton().decode("3M0M5M0M"); + final SAMRecord unconsolidatedRead = ArtificialSAMUtils.createArtificialRead(unconsolidatedCigar); + + final StingSAMIterator readIterator = StingSAMIteratorAdapter.adapt(Arrays.asList(unconsolidatedRead).iterator()); + final ReadFormattingIterator formattingIterator = new ReadFormattingIterator(readIterator, false, (byte)-1); + final SAMRecord postIterationRead = formattingIterator.next(); + + Assert.assertEquals(postIterationRead.getCigarString(), "8M", "Cigar 3M0M5M0M not consolidated correctly by ReadFormattingIterator"); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java index f3e1ce44b..4d85997b3 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TAROrderedReadCacheUnitTest.java @@ -26,9 +26,11 @@ package org.broadinstitute.sting.gatk.traversals; import net.sf.picard.reference.IndexedFastaSequenceFile; +import net.sf.samtools.SAMFileHeader; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.sam.ArtificialBAMBuilder; +import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; import org.testng.annotations.BeforeClass; 
@@ -39,6 +41,7 @@ import java.io.File; import java.io.FileNotFoundException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; public class TAROrderedReadCacheUnitTest extends BaseTest { @@ -98,8 +101,53 @@ public class TAROrderedReadCacheUnitTest extends BaseTest { Assert.assertEquals(cache.getNumDiscarded(), 0, "should have reset stats"); Assert.assertEquals(cacheReads.size(), nExpectedToKeep, "should have 1 read for every read we expected to keep"); + verifySortednessOfReads(cacheReads); + } + + @Test + public void testReadCacheWithReducedReads() { + final List reads = new ArrayList(); + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); + final int[] baseCounts = { 10, 10, 10, 10, 10 }; + + for ( int i = 1; i <= 100; i++ ) { + reads.add(ArtificialSAMUtils.createArtificialReducedRead(header, "foo", 0, i, 5, baseCounts)); + reads.add(ArtificialSAMUtils.createArtificialRead(header, "foo", 0, i, 5)); + } + + final TAROrderedReadCache cache = new TAROrderedReadCache(50); + + cache.addAll(reads); + + // Our cache should have kept all of the reduced reads (which are retained unconditionally and do not count + // towards the capacity limit), and discarded half of the 100 non-reduced reads due to the cache capacity + // limit of 50. 
+ Assert.assertEquals(cache.size(), 150, "wrong number of reads in the cache at the end"); + Assert.assertEquals(cache.getNumDiscarded(), 50, "wrong number of reads discarded from the cache"); + + final List cacheReads = cache.popCurrentReads(); + + int numReducedReadsRetained = 0; + int numNormalReadsRetained = 0; + + for ( GATKSAMRecord read : cacheReads ) { + if ( read.isReducedRead() ) { + numReducedReadsRetained++; + } + else { + numNormalReadsRetained++; + } + } + + Assert.assertEquals(numReducedReadsRetained, 100, "wrong number of reduced reads retained in the cache"); + Assert.assertEquals(numNormalReadsRetained, 50, "wrong number of non-reduced reads retained in the cache"); + + verifySortednessOfReads(cacheReads); + } + + private void verifySortednessOfReads( final List reads) { int lastStart = -1; - for ( final GATKSAMRecord read : cacheReads ) { + for ( GATKSAMRecord read : reads ) { Assert.assertTrue(lastStart <= read.getAlignmentStart(), "Reads should be sorted but weren't. 
Found read with start " + read.getAlignmentStart() + " while last was " + lastStart); lastStart = read.getAlignmentStart(); } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java index 1f5cd6d0e..e4b6c37cc 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegionsUnitTest.java @@ -405,8 +405,6 @@ public class TraverseActiveRegionsUnitTest extends BaseTest { for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam)) t.traverse(walker, dataProvider, 0); - t.endTraversal(walker, 0); - return walker.mappedActiveRegions; } diff --git a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java index 8bc373fe8..5b52d4e33 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/traversals/TraverseReadsUnitTest.java @@ -32,6 +32,7 @@ import org.broadinstitute.sting.commandline.Tags; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.gatk.datasources.providers.ReadShardDataProvider; import org.broadinstitute.sting.gatk.datasources.reads.*; +import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation; import org.broadinstitute.sting.gatk.walkers.ReadWalker; import org.broadinstitute.sting.gatk.walkers.qc.CountReads; @@ -47,6 +48,7 @@ import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import static 
org.testng.Assert.fail; @@ -146,18 +148,18 @@ public class TraverseReadsUnitTest extends BaseTest { fail("Shard == null"); } - ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null,null); + ReadShardDataProvider dataProvider = new ReadShardDataProvider(shard,genomeLocParser,dataSource.seek(shard),null, Collections.emptyList()); accumulator = traversalEngine.traverse(countReadWalker, dataProvider, accumulator); dataProvider.close(); } countReadWalker.onTraversalDone(accumulator); - if (!(accumulator instanceof Integer)) { - fail("Count read walker should return an interger."); + if (!(accumulator instanceof Long)) { + fail("Count read walker should return a Long."); } - if (((Integer) accumulator) != 10000) { - fail("there should be 10000 mapped reads in the index file, there was " + ((Integer) accumulator)); + if (!accumulator.equals(new Long(10000))) { + fail("there should be 10000 mapped reads in the index file, there was " + (accumulator)); } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java index 6b0422c6a..604c0e377 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/BAQIntegrationTest.java @@ -43,7 +43,7 @@ public class BAQIntegrationTest extends WalkerTest { // -------------------------------------------------------------------------------------------------------------- @Test public void testPrintReadsNoBAQ() { - WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF", 1, Arrays.asList("11af64ba020262d06b490bae2c5e08f8")); + WalkerTestSpec spec = new WalkerTestSpec( baseCommand +" -baq OFF", 1, Arrays.asList("d1f74074e718c82810512bf40dbc7f72")); executeTest(String.format("testPrintReadsNoBAQ"), spec); } diff --git 
a/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java index 4aaba0d70..bfabe2bc1 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/CNV/SymbolicAllelesIntegrationTest.java @@ -57,7 +57,7 @@ public class SymbolicAllelesIntegrationTest extends WalkerTest { WalkerTestSpec spec = new WalkerTestSpec( baseTestString(b36KGReference, "symbolic_alleles_2.vcf"), 1, - Arrays.asList("bf5a09f783ab1fa44774c81f91d10921")); + Arrays.asList("30f66a097987330d42e87da8bcd6be21")); executeTest("Test symbolic alleles mixed in with non-symbolic alleles", spec); } } diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java new file mode 100644 index 000000000..8f5541c41 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/qc/CountReadsUnitTest.java @@ -0,0 +1,51 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. 
+* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.gatk.walkers.qc; + +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class CountReadsUnitTest extends BaseTest { + + @Test + public void testReadsDoNotOverflowInt() { + + final CountReads walker = new CountReads(); + + final long moreThanMaxInt = ((long)Integer.MAX_VALUE) + 1L; + + Long sum = walker.reduceInit(); + + for ( long i = 0L; i < moreThanMaxInt; i++ ) { + final Integer x = walker.map(null, null, null); + sum = walker.reduce(x, sum); + } + + Assert.assertEquals(sum.longValue(), moreThanMaxInt); + Assert.assertTrue(sum.longValue() > (long) Integer.MAX_VALUE); + } +} diff --git a/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java b/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java index 7482eae60..adc7ad765 100644 --- a/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java +++ b/public/java/test/org/broadinstitute/sting/gatk/walkers/readutils/PrintReadsIntegrationTest.java @@ -59,10 +59,10 @@ public class PrintReadsIntegrationTest extends WalkerTest { {new PRTest(hg18Reference, "HiSeq.1mb.bam", " -simplifyBAM", "1510dc4429f3ed49caf96da41e8ed396")}, {new PRTest(hg18Reference, "HiSeq.1mb.bam", " -n 10", "0e3d1748ad1cb523e3295cab9d09d8fc")}, // See: GATKBAMIndex.getStartOfLastLinearBin(), BAMScheduler.advance(), 
IntervalOverlapFilteringIterator.advance() - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", "", "e1cac555f3d720f611c47eec93e84bd9")}, - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1", "6e2558317d409195eab3006dc9e5524c")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", "", "d7f23fd77d7dc7cb50d3397f644c6d8a")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1", "c601db95b20248d012b0085347fcb6d1")}, {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L unmapped", "2d32440e47e8d9d329902fe573ad94ce")}, - {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1 -L unmapped", "6e2558317d409195eab3006dc9e5524c")}, + {new PRTest(b37KGReference, "unmappedFlagReadsInLastLinearBin.bam", " -L 1 -L unmapped", "c601db95b20248d012b0085347fcb6d1")}, {new PRTest(b37KGReference, "oneReadAllInsertion.bam", "", "349650b6aa9e574b48a2a62627f37c7d")}, {new PRTest(b37KGReference, "NA12878.1_10mb_2_10mb.bam", "", "0c1cbe67296637a85e80e7a182f828ab")} }; diff --git a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java index 27af8ec68..3933b3830 100644 --- a/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/MathUtilsUnitTest.java @@ -25,6 +25,7 @@ package org.broadinstitute.sting.utils; +import cern.jet.random.Normal; import org.broadinstitute.sting.BaseTest; import org.testng.Assert; import org.testng.annotations.BeforeClass; @@ -40,6 +41,35 @@ public class MathUtilsUnitTest extends BaseTest { public void init() { } + /** + * Tests that we get unqiue values for the valid (non-null-producing) input space for {@link MathUtils#fastGenerateUniqueHashFromThreeIntegers(int, int, int)}. 
+ */ + @Test + public void testGenerateUniqueHashFromThreePositiveIntegers() { + logger.warn("Executing testGenerateUniqueHashFromThreePositiveIntegers"); + + final Set observedLongs = new HashSet(); + for (short i = 0; i < Byte.MAX_VALUE; i++) { + for (short j = 0; j < Byte.MAX_VALUE; j++) { + for (short k = 0; k < Byte.MAX_VALUE; k++) { + final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k); + //System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong)); + Assert.assertTrue(observedLongs.add(aLong)); + } + } + } + + for (short i = Byte.MAX_VALUE; i <= Short.MAX_VALUE && i > 0; i += 128) { + for (short j = Byte.MAX_VALUE; j <= Short.MAX_VALUE && j > 0; j += 128) { + for (short k = Byte.MAX_VALUE; k <= Short.MAX_VALUE && k > 0; k += 128) { + final Long aLong = MathUtils.fastGenerateUniqueHashFromThreeIntegers(i, j, k); + // System.out.println(String.format("%s, %s, %s: %s", i, j, k, aLong)); + Assert.assertTrue(observedLongs.add(aLong)); + } + } + } + } + /** * Tests that we get the right values from the binomial distribution */ @@ -63,13 +93,15 @@ public class MathUtilsUnitTest extends BaseTest { public void testCumulativeBinomialProbability() { logger.warn("Executing testCumulativeBinomialProbability"); - final int numTrials = 10; - for ( int i = 0; i < numTrials; i++ ) - Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials)); - - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7); - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7); - Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7); + for (int j = 0; j < 2; j++) { // Test memoizing functionality, as well. 
+ final int numTrials = 10; + for ( int i = 0; i < numTrials; i++ ) + Assert.assertEquals(MathUtils.binomialCumulativeProbability(numTrials, i, i), MathUtils.binomialProbability(numTrials, i), 1e-10, String.format("k=%d, n=%d", i, numTrials)); + + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 2), 0.05468750, 1e-7); + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 5), 0.62304687, 1e-7); + Assert.assertEquals(MathUtils.binomialCumulativeProbability(10, 0, 10), 1.0, 1e-7); + } } /** @@ -398,4 +430,20 @@ public class MathUtilsUnitTest extends BaseTest { Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0,-3.0,2.0}, new double[]{6.0,7.0,8.0}),10.0,1e-3); Assert.assertEquals(MathUtils.logDotProduct(new double[]{-5.0}, new double[]{6.0}),1.0,1e-3); } + + @Test + public void testNormalDistribution() { + final double requiredPrecision = 1E-10; + + final Normal n = new Normal(0.0, 1.0, null); + for( final double mu : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) { + for( final double sigma : new double[]{1.2, 3.0, 5.8977} ) { + for( final double x : new double[]{-5.0, -3.2, -1.5, 0.0, 1.2, 3.0, 5.8977} ) { + n.setState(mu, sigma); + Assert.assertEquals(n.pdf(x), MathUtils.normalDistribution(mu, sigma, x), requiredPrecision); + Assert.assertEquals(Math.log10(n.pdf(x)), MathUtils.normalDistributionLog10(mu, sigma, x), requiredPrecision); + } + } + } + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java index ad5fd3642..0f9b8531a 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java @@ -144,7 +144,7 @@ public class ActiveRegionUnitTest extends BaseTest { } @Test(enabled = !DEBUG, dataProvider = "ActiveRegionReads") - public void 
testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) { + public void testActiveRegionReads(final GenomeLoc loc, final GATKSAMRecord read) throws Exception { final GenomeLoc expectedSpan = loc.union(genomeLocParser.createGenomeLoc(read)); final ActiveRegion region = new ActiveRegion(loc, null, true, genomeLocParser, 0); @@ -176,19 +176,31 @@ public class ActiveRegionUnitTest extends BaseTest { Assert.assertEquals(region.getReadSpanLoc(), expectedSpan); Assert.assertTrue(region.equalExceptReads(region2)); - region.removeAll(Collections.emptyList()); + region.removeAll(Collections.emptySet()); Assert.assertEquals(region.getReads(), Collections.singletonList(read)); Assert.assertEquals(region.size(), 1); Assert.assertEquals(region.getExtendedLoc(), loc); Assert.assertEquals(region.getReadSpanLoc(), expectedSpan); Assert.assertTrue(region.equalExceptReads(region2)); - region.removeAll(Collections.singletonList(read)); + region.removeAll(Collections.singleton(read)); Assert.assertEquals(region.getReads(), Collections.emptyList()); Assert.assertEquals(region.size(), 0); Assert.assertEquals(region.getExtendedLoc(), loc); Assert.assertEquals(region.getReadSpanLoc(), loc); Assert.assertTrue(region.equalExceptReads(region2)); + + final GATKSAMRecord read2 = (GATKSAMRecord)read.clone(); + read2.setReadName(read.getReadName() + ".clone"); + + for ( final GATKSAMRecord readToKeep : Arrays.asList(read, read2)) { + region.addAll(Arrays.asList(read, read2)); + final GATKSAMRecord readToDiscard = readToKeep == read ? 
read2 : read; + region.removeAll(Collections.singleton(readToDiscard)); + Assert.assertEquals(region.getReads(), Arrays.asList(readToKeep)); + Assert.assertEquals(region.size(), 1); + Assert.assertEquals(region.getExtendedLoc(), loc); + } } // ----------------------------------------------------------------------------------------------- diff --git a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java index 9be250b8e..f208815f7 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActivityProfileUnitTest.java @@ -450,7 +450,7 @@ public class ActivityProfileUnitTest extends BaseTest { private double[] makeGaussian(final int mean, final int range, final double sigma) { final double[] gauss = new double[range]; for( int iii = 0; iii < range; iii++ ) { - gauss[iii] = MathUtils.NormalDistribution(mean, sigma, iii) + ActivityProfile.ACTIVE_PROB_THRESHOLD; + gauss[iii] = MathUtils.normalDistribution(mean, sigma, iii) + ActivityProfile.ACTIVE_PROB_THRESHOLD; } return gauss; } diff --git a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java index 0e0f6322e..cbbc8252b 100644 --- a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperTestUtils.java @@ -28,8 +28,8 @@ package org.broadinstitute.sting.utils.clipping; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; +import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import 
org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.testng.Assert; @@ -38,13 +38,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Stack; -/** - * Created by IntelliJ IDEA. - * User: roger - * Date: 11/27/11 - * Time: 6:45 AM - * To change this template use File | Settings | File Templates. - */ public class ReadClipperTestUtils { //Should contain all the utils needed for tests to mass produce //reads, cigars, and other needed classes @@ -236,78 +229,6 @@ public class ReadClipperTestUtils { } public static Cigar cigarFromString(String cigarString) { - Cigar cigar = new Cigar(); - - boolean isNumber = false; - int number = 0; - for (int i = 0; i < cigarString.length(); i++) { - char x = cigarString.charAt(i); - - if (x >= '0' && x <='9') { - if (isNumber) { - number *= 10; - } - else { - isNumber = true; - } - number += x - '0'; - } - - else { - CigarElement e; - switch (x) { - case 'M': - case 'm': - e = new CigarElement(number, CigarOperator.M); - break; - - case 'I': - case 'i': - e = new CigarElement(number, CigarOperator.I); - break; - - case 'D': - case 'd': - e = new CigarElement(number, CigarOperator.D); - break; - - case 'S': - case 's': - e = new CigarElement(number, CigarOperator.S); - break; - - case 'N': - case 'n': - e = new CigarElement(number, CigarOperator.N); - break; - - case 'H': - case 'h': - e = new CigarElement(number, CigarOperator.H); - break; - - case 'P': - case 'p': - e = new CigarElement(number, CigarOperator.P); - break; - - case '=': - e = new CigarElement(number, CigarOperator.EQ); - break; - - case 'X': - case 'x': - e = new CigarElement(number, CigarOperator.X); - break; - - default: - throw new ReviewedStingException("Unrecognized cigar operator: " + x + " (number: " + number + ")"); - } - cigar.add(e); - } - } - return cigar; + return TextCigarCodec.getSingleton().decode(cigarString); } - - } diff --git 
a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java index 6ec4336b0..d6bd0d4d2 100644 --- a/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/clipping/ReadClipperUnitTest.java @@ -46,16 +46,17 @@ import java.util.List; * Date: 9/28/11 */ public class ReadClipperUnitTest extends BaseTest { + private final static boolean DEBUG = false; List cigarList; - int maximumCigarSize = 6; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 + int maximumCigarSize = 10; // 6 is the minimum necessary number to try all combinations of cigar types with guarantee of clipping an element with length = 2 @BeforeClass public void init() { cigarList = ReadClipperTestUtils.generateCigarList(maximumCigarSize); } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipBothEndsByReferenceCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -71,7 +72,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReadCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -101,7 +102,7 @@ public class ReadClipperUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ClippedReadLengthData", enabled = true) + @Test(dataProvider = "ClippedReadLengthData", enabled = !DEBUG) public void testHardClipReadLengthIsRight(final int originalReadLength, final int nToClip) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(originalReadLength + "M"); read.getReadLength(); // provoke the caching of the read length @@ -112,7 +113,7 @@ public class ReadClipperUnitTest 
extends BaseTest { clipped.getReadLength(), clipped.getCigar(), expectedReadLength, nToClip, read.getReadLength(), read.getCigar())); } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinates() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -135,7 +136,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinatesLeftTail() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -154,7 +155,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipByReferenceCoordinatesRightTail() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -172,7 +173,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipLowQualEnds() { final byte LOW_QUAL = 2; final byte HIGH_QUAL = 30; @@ -216,7 +217,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipSoftClippedBases() { for (Cigar cigar : cigarList) { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); @@ -251,7 +252,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertSoftClippedBases() { for (Cigar cigar : cigarList) { final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP); @@ -273,7 +274,7 @@ public class ReadClipperUnitTest extends BaseTest { } } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testRevertSoftClippedBasesWithThreshold() { for (Cigar cigar : cigarList) { final int leadingSoftClips = leadingCigarElementLength(cigar, CigarOperator.SOFT_CLIP); @@ -292,6 
+293,40 @@ public class ReadClipperUnitTest extends BaseTest { } } + @DataProvider(name = "RevertSoftClipsBeforeContig") + public Object[][] makeRevertSoftClipsBeforeContig() { + List tests = new ArrayList<>(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + for ( int softStart : Arrays.asList(-10, -1, 0) ) { + for ( int alignmentStart : Arrays.asList(1, 10) ) { + tests.add(new Object[]{softStart, alignmentStart}); + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = true, dataProvider = "RevertSoftClipsBeforeContig") + public void testRevertSoftClippedBasesBeforeStartOfContig(final int softStart, final int alignmentStart) { + final int nMatches = 10; + final int nSoft = -1 * (softStart - alignmentStart); + final String cigar = nSoft + "S" + nMatches + "M"; + final GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar(cigar); + read.setAlignmentStart(alignmentStart); + + Assert.assertEquals(read.getSoftStart(), softStart); + Assert.assertEquals(read.getAlignmentStart(), alignmentStart); + Assert.assertEquals(read.getCigarString(), cigar); + + final GATKSAMRecord reverted = ReadClipper.revertSoftClippedBases(read); + + final int expectedAlignmentStart = 1; + final String expectedCigar = (1 - softStart) + "H" + read.getAlignmentEnd() + "M"; + Assert.assertEquals(reverted.getSoftStart(), expectedAlignmentStart); + Assert.assertEquals(reverted.getAlignmentStart(), expectedAlignmentStart); + Assert.assertEquals(reverted.getCigarString(), expectedCigar); + } private void assertNoLowQualBases(GATKSAMRecord read, byte low_qual) { if (!read.isEmpty()) { @@ -375,7 +410,7 @@ public class ReadClipperUnitTest extends BaseTest { } - @Test(enabled = true) + @Test(enabled = !DEBUG) public void testHardClipReducedRead() { GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("10M"); final int[] counts = new int[read.getReadLength()]; @@ -391,4 +426,11 @@ public class ReadClipperUnitTest 
extends BaseTest { } } + @Test(enabled = !DEBUG) + public void testRevertEntirelySoftclippedReads() { + GATKSAMRecord read = ReadClipperTestUtils.makeReadFromCigar("2H1S3H"); + GATKSAMRecord clippedRead = ReadClipper.revertSoftClippedBases(read); + Assert.assertEquals(clippedRead.getAlignmentStart(), read.getSoftStart()); + } + } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java index e9600480a..0886427ca 100644 --- a/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/fragments/FragmentUtilsUnitTest.java @@ -26,6 +26,7 @@ package org.broadinstitute.sting.utils.fragments; import net.sf.samtools.SAMFileHeader; +import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.pileup.PileupElement; @@ -296,4 +297,51 @@ public class FragmentUtilsUnitTest extends BaseTest { final GATKSAMRecord actual = FragmentUtils.mergeOverlappingPairedFragments(read1, read2); Assert.assertNull(actual); } + + @DataProvider(name = "MergeFragmentsOffContig") + public Object[][] makeMergeFragmentsOffContig() throws Exception { + List tests = new ArrayList<>(); + + for ( final int pre1 : Arrays.asList(0, 50)) { + for ( final int post1 : Arrays.asList(0, 50)) { + for ( final int pre2 : Arrays.asList(0, 50)) { + for ( final int post2 : Arrays.asList(0, 50)) { + tests.add(new Object[]{pre1, post1, pre2, post2}); + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "MergeFragmentsOffContig") + public void testMergeFragmentsOffContig(final int pre1, final int post1, final int pre2, final int post2) { + final int contigSize = 10; + final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 0, 
contigSize); + + final GATKSAMRecord read1 = createReadOffContig(header, false, pre1, post1); + final GATKSAMRecord read2 = createReadOffContig(header, true, pre2, post2); + + final GATKSAMRecord merged = FragmentUtils.mergeOverlappingPairedFragments(read1, read2); + } + + private GATKSAMRecord createReadOffContig(final SAMFileHeader header, final boolean negStrand, final int pre, final int post) { + final int contigLen = header.getSequence(0).getSequenceLength(); + final int readLen = pre + contigLen + post; + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, readLen); + read.setAlignmentStart(1); + read.setCigar(TextCigarCodec.getSingleton().decode(pre + "S" + contigLen + "M" + post + "S")); + read.setBaseQualities(Utils.dupBytes((byte) 30, readLen)); + read.setReadBases(Utils.dupBytes((byte)'A', readLen)); + read.setMappingQuality(60); + read.setMateAlignmentStart(1); + read.setProperPairFlag(true); + read.setReadPairedFlag(true); + read.setInferredInsertSize(30); + read.setReadNegativeStrandFlag(negStrand); + read.setMateNegativeStrandFlag(! 
negStrand); + read.setReadGroup(new GATKSAMReadGroupRecord("foo")); + return read; + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java index e7d54c460..fbf0242a3 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java @@ -1033,5 +1033,12 @@ public class AlignmentUtilsUnitTest { Assert.assertEquals(AlignmentUtils.startsOrEndsWithInsertionOrDeletion(TextCigarCodec.getSingleton().decode(cigar)), expected); } + @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true) + public void testRemoveTrailingDeletions(final String cigar, final boolean expected) { + final Cigar originalCigar = TextCigarCodec.getSingleton().decode(cigar); + final Cigar newCigar = AlignmentUtils.removeTrailingDeletions(originalCigar); + + Assert.assertEquals(originalCigar.equals(newCigar), !cigar.endsWith("D")); + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java new file mode 100644 index 000000000..051d0bcec --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVCFUtilsUnitTest.java @@ -0,0 +1,86 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* 
included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.variant; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; +import org.broadinstitute.sting.gatk.contexts.AlignmentContext; +import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; +import org.broadinstitute.sting.gatk.walkers.RodWalker; +import org.broadinstitute.sting.gatk.walkers.Walker; +import org.broadinstitute.variant.vcf.VCFHeader; +import org.broadinstitute.variant.vcf.VCFHeaderLine; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.Set; + +public class GATKVCFUtilsUnitTest extends BaseTest { + public static class VCFHeaderTestWalker extends RodWalker { + public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) { return null; } + public Integer reduceInit() { return 0; } + public Integer reduce(Integer value, Integer sum) { return value + sum; } + } + + public static class VCFHeaderTest2Walker extends VCFHeaderTestWalker {} + + @Test + public void testAddingVCFHeaderInfo() { + final VCFHeader header = new VCFHeader(); + + final Walker walker1 = new VCFHeaderTestWalker(); + final Walker walker2 = new VCFHeaderTest2Walker(); + + final GenomeAnalysisEngine testEngine1 = new GenomeAnalysisEngine(); + 
testEngine1.setWalker(walker1); + + final GenomeAnalysisEngine testEngine2 = new GenomeAnalysisEngine(); + testEngine2.setWalker(walker2); + + final VCFHeaderLine line1 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine1, Collections.EMPTY_LIST); + logger.warn(line1); + Assert.assertNotNull(line1); + Assert.assertEquals(line1.getKey(), GATKVCFUtils.GATK_COMMAND_LINE_KEY); + for ( final String field : Arrays.asList("Version", "ID", "Date", "CommandLineOptions")) + Assert.assertTrue(line1.toString().contains(field), "Couldn't find field " + field + " in " + line1.getValue()); + Assert.assertTrue(line1.toString().contains("ID=" + testEngine1.getWalkerName())); + + final VCFHeaderLine line2 = GATKVCFUtils.getCommandLineArgumentHeaderLine(testEngine2, Collections.EMPTY_LIST); + logger.warn(line2); + + header.addMetaDataLine(line1); + final Set lines1 = header.getMetaDataInInputOrder(); + Assert.assertTrue(lines1.contains(line1)); + + header.addMetaDataLine(line2); + final Set lines2 = header.getMetaDataInInputOrder(); + Assert.assertTrue(lines2.contains(line1)); + Assert.assertTrue(lines2.contains(line2)); + } +} \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java index fcc7c7998..937698d82 100644 --- a/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/variant/GATKVariantContextUtilsUnitTest.java @@ -28,6 +28,7 @@ package org.broadinstitute.sting.utils.variant; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.collections.Pair; import 
org.broadinstitute.variant.variantcontext.*; @@ -39,6 +40,7 @@ import org.testng.annotations.Test; import java.util.*; public class GATKVariantContextUtilsUnitTest extends BaseTest { + private final static boolean DEBUG = false; Allele Aref, T, C, G, Cref, ATC, ATCATC; @@ -168,7 +170,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeAllelesTest.getTests(MergeAllelesTest.class); } - @Test(dataProvider = "mergeAlleles") + @Test(enabled = !DEBUG, dataProvider = "mergeAlleles") public void testMergeAlleles(MergeAllelesTest cfg) { final List inputs = new ArrayList(); @@ -229,7 +231,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return SimpleMergeRSIDTest.getTests(SimpleMergeRSIDTest.class); } - @Test(dataProvider = "simplemergersiddata") + @Test(enabled = !DEBUG, dataProvider = "simplemergersiddata") public void testRSIDMerge(SimpleMergeRSIDTest cfg) { VariantContext snpVC1 = makeVC("snpvc1", Arrays.asList(Aref, T)); final List inputs = new ArrayList(); @@ -352,7 +354,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeFilteredTest.getTests(MergeFilteredTest.class); } - @Test(dataProvider = "mergeFiltered") + @Test(enabled = !DEBUG, dataProvider = "mergeFiltered") public void testMergeFiltered(MergeFilteredTest cfg) { final List priority = vcs2priority(cfg.inputs); final VariantContext merged = GATKVariantContextUtils.simpleMerge( @@ -479,7 +481,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return MergeGenotypesTest.getTests(MergeGenotypesTest.class); } - @Test(dataProvider = "mergeGenotypes") + @Test(enabled = !DEBUG, dataProvider = "mergeGenotypes") public void testMergeGenotypes(MergeGenotypesTest cfg) { final VariantContext merged = GATKVariantContextUtils.simpleMerge( cfg.inputs, cfg.priority, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED, @@ -517,7 +519,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - @Test + 
@Test(enabled = !DEBUG) public void testMergeGenotypesUniquify() { final VariantContext vc1 = makeVC("1", Arrays.asList(Aref, T), makeG("s1", Aref, T, -1)); final VariantContext vc2 = makeVC("2", Arrays.asList(Aref, T), makeG("s1", Aref, T, -2)); @@ -547,7 +549,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { // // -------------------------------------------------------------------------------- - @Test + @Test(enabled = !DEBUG) public void testAnnotationSet() { for ( final boolean annotate : Arrays.asList(true, false)) { for ( final String set : Arrays.asList("set", "combine", "x")) { @@ -618,7 +620,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return ReverseClippingPositionTestProvider.getTests(ReverseClippingPositionTestProvider.class); } - @Test(dataProvider = "ReverseClippingPositionTestProvider") + @Test(enabled = !DEBUG, dataProvider = "ReverseClippingPositionTestProvider") public void testReverseClippingPositionTestProvider(ReverseClippingPositionTestProvider cfg) { int result = GATKVariantContextUtils.computeReverseClipping(cfg.alleles, cfg.ref.getBytes()); Assert.assertEquals(result, cfg.expectedClip); @@ -706,7 +708,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "SplitBiallelics") + @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics") public void testSplitBiallelicsNoGenotypes(final VariantContext vc, final List expectedBiallelics) { final List biallelics = GATKVariantContextUtils.splitVariantContextToBiallelics(vc); Assert.assertEquals(biallelics.size(), expectedBiallelics.size()); @@ -717,7 +719,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - @Test(dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes") + @Test(enabled = !DEBUG, dataProvider = "SplitBiallelics", dependsOnMethods = "testSplitBiallelicsNoGenotypes") public void testSplitBiallelicsGenotypes(final 
VariantContext vc, final List expectedBiallelics) { final List genotypes = new ArrayList(); @@ -745,7 +747,6 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { } } - // -------------------------------------------------------------------------------- // // Test repeats @@ -810,14 +811,14 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return RepeatDetectorTest.getTests(RepeatDetectorTest.class); } - @Test(dataProvider = "RepeatDetectorTest") + @Test(enabled = !DEBUG, dataProvider = "RepeatDetectorTest") public void testRepeatDetectorTest(RepeatDetectorTest cfg) { // test alleles are equal Assert.assertEquals(GATKVariantContextUtils.isTandemRepeat(cfg.vc, cfg.ref.getBytes()), cfg.isTrueRepeat); } - @Test + @Test(enabled = !DEBUG) public void testRepeatAllele() { Allele nullR = Allele.create("A", true); Allele nullA = Allele.create("A", false); @@ -940,7 +941,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ForwardClippingData") + @Test(enabled = !DEBUG, dataProvider = "ForwardClippingData") public void testForwardClipping(final List alleleStrings, final int expectedClip) { final List alleles = new LinkedList(); for ( final String alleleString : alleleStrings ) @@ -975,7 +976,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "ClipAlleleTest") + @Test(enabled = !DEBUG, dataProvider = "ClipAlleleTest") public void testClipAlleles(final List alleleStrings, final List expected, final int numLeftClipped) { final int start = 10; final VariantContext unclipped = GATKVariantContextUtils.makeFromAlleles("test", "20", start, alleleStrings); @@ -1019,7 +1020,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "PrimitiveAlleleSplittingData") + @Test(enabled = !DEBUG, dataProvider = 
"PrimitiveAlleleSplittingData") public void testPrimitiveAlleleSplitting(final String ref, final String alt, final int expectedSplit, final List variantPositions) { final int start = 10; @@ -1066,7 +1067,7 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "AlleleRemappingData") + @Test(enabled = !DEBUG, dataProvider = "AlleleRemappingData") public void testAlleleRemapping(final Map alleleMap, final int numGenotypes) { final GATKVariantContextUtils.AlleleMapper alleleMapper = new GATKVariantContextUtils.AlleleMapper(alleleMap); @@ -1102,4 +1103,204 @@ public class GATKVariantContextUtilsUnitTest extends BaseTest { return gc; } + + // -------------------------------------------------------------------------------- + // + // Test subsetDiploidAlleles + // + // -------------------------------------------------------------------------------- + + @DataProvider(name = "subsetDiploidAllelesData") + public Object[][] makesubsetDiploidAllelesData() { + List tests = new ArrayList<>(); + + final Allele A = Allele.create("A", true); + final Allele C = Allele.create("C"); + final Allele G = Allele.create("G"); + + final List AA = Arrays.asList(A,A); + final List AC = Arrays.asList(A,C); + final List CC = Arrays.asList(C,C); + final List AG = Arrays.asList(A,G); + final List CG = Arrays.asList(C,G); + final List GG = Arrays.asList(G,G); + final List ACG = Arrays.asList(A,C,G); + + final VariantContext vcBase = new VariantContextBuilder("test", "20", 10, 10, AC).make(); + + final double[] homRefPL = MathUtils.normalizeFromRealSpace(new double[]{0.9, 0.09, 0.01}); + final double[] hetPL = MathUtils.normalizeFromRealSpace(new double[]{0.09, 0.9, 0.01}); + final double[] homVarPL = MathUtils.normalizeFromRealSpace(new double[]{0.01, 0.09, 0.9}); + final double[] uninformative = new double[]{0, 0, 0}; + + final Genotype base = new GenotypeBuilder("NA12878").DP(10).GQ(50).make(); + + // make sure we don't 
screw up the simple case + final Genotype aaGT = new GenotypeBuilder(base).alleles(AA).AD(new int[]{10,2}).PL(homRefPL).GQ(8).make(); + final Genotype acGT = new GenotypeBuilder(base).alleles(AC).AD(new int[]{10,2}).PL(hetPL).GQ(8).make(); + final Genotype ccGT = new GenotypeBuilder(base).alleles(CC).AD(new int[]{10,2}).PL(homVarPL).GQ(8).make(); + + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(aaGT).make(), AC, Arrays.asList(new GenotypeBuilder(aaGT).noAD().make())}); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(acGT).make(), AC, Arrays.asList(new GenotypeBuilder(acGT).noAD().make())}); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(ccGT).make(), AC, Arrays.asList(new GenotypeBuilder(ccGT).noAD().make())}); + + // uninformative test case + final Genotype uninformativeGT = new GenotypeBuilder(base).alleles(CC).noAD().PL(uninformative).GQ(0).make(); + final Genotype emptyGT = new GenotypeBuilder(base).alleles(GATKVariantContextUtils.NO_CALL_ALLELES).noAD().noPL().noGQ().make(); + tests.add(new Object[]{new VariantContextBuilder(vcBase).genotypes(uninformativeGT).make(), AC, Arrays.asList(emptyGT)}); + + // actually subsetting down from multiple alt values + final double[] homRef3AllelesPL = new double[]{0, -10, -20, -30, -40, -50}; + final double[] hetRefC3AllelesPL = new double[]{-10, 0, -20, -30, -40, -50}; + final double[] homC3AllelesPL = new double[]{-20, -10, 0, -30, -40, -50}; + final double[] hetRefG3AllelesPL = new double[]{-20, -10, -30, 0, -40, -50}; + final double[] hetCG3AllelesPL = new double[]{-20, -10, -30, -40, 0, -50}; // AA, AC, CC, AG, CG, GG + final double[] homG3AllelesPL = new double[]{-20, -10, -30, -40, -50, 0}; // AA, AC, CC, AG, CG, GG + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homRef3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new 
double[]{0, -10, -20}).noAD().GQ(100).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetRefC3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(AC).PL(new double[]{-10, 0, -20}).noAD().GQ(100).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homC3AllelesPL).make()).make(), + AC, + Arrays.asList(new GenotypeBuilder(base).alleles(CC).PL(new double[]{-20, -10, 0}).noAD().GQ(100).make())}); + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetRefG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(AG).PL(new double[]{-20, 0, -50}).noAD().GQ(200).make())}); + + // wow, scary -- bad output but discussed with Eric and we think this is the only thing that can be done + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(hetCG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(AA).PL(new double[]{0, -20, -30}).noAD().GQ(200).make())}); + + tests.add(new Object[]{ + new VariantContextBuilder(vcBase).alleles(ACG).genotypes(new GenotypeBuilder(base).alleles(AA).noAD().PL(homG3AllelesPL).make()).make(), + AG, + Arrays.asList(new GenotypeBuilder(base).alleles(GG).PL(new double[]{-20, -40, 0}).noAD().GQ(200).make())}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "subsetDiploidAllelesData") + public void testsubsetDiploidAllelesData(final VariantContext inputVC, + final List allelesToUse, + final List expectedGenotypes) { + final GenotypesContext actual = GATKVariantContextUtils.subsetDiploidAlleles(inputVC, allelesToUse, GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN); + + 
Assert.assertEquals(actual.size(), expectedGenotypes.size()); + for ( final Genotype expected : expectedGenotypes ) { + final Genotype actualGT = actual.get(expected.getSampleName()); + Assert.assertNotNull(actualGT); + assertGenotypesAreEqual(actualGT, expected); + } + } + + @DataProvider(name = "UpdateGenotypeAfterSubsettingData") + public Object[][] makeUpdateGenotypeAfterSubsettingData() { + List tests = new ArrayList(); + + final Allele A = Allele.create("A", true); + final Allele C = Allele.create("C"); + final Allele G = Allele.create("G"); + + final List AA = Arrays.asList(A,A); + final List AC = Arrays.asList(A,C); + final List CC = Arrays.asList(C,C); + final List AG = Arrays.asList(A,G); + final List CG = Arrays.asList(C,G); + final List GG = Arrays.asList(G,G); + final List ACG = Arrays.asList(A,C,G); + final List> allSubsetAlleles = Arrays.asList(AC,AG,ACG); + + final double[] homRefPL = new double[]{0.9, 0.09, 0.01}; + final double[] hetPL = new double[]{0.09, 0.9, 0.01}; + final double[] homVarPL = new double[]{0.01, 0.09, 0.9}; + final double[] uninformative = new double[]{0.33, 0.33, 0.33}; + final List allPLs = Arrays.asList(homRefPL, hetPL, homVarPL, uninformative); + + for ( final List alleles : allSubsetAlleles ) { + for ( final double[] pls : allPLs ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.SET_TO_NO_CALL, pls, AA, alleles, GATKVariantContextUtils.NO_CALL_ALLELES}); + } + } + + for ( final List originalGT : Arrays.asList(AA, AC, CC, AG, CG, GG) ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homRefPL, originalGT, AC, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, hetPL, originalGT, AC, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, homVarPL, originalGT, AC, CC}); +// tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.USE_PLS_TO_ASSIGN, 
uninformative, AA, AC, GATKVariantContextUtils.NO_CALL_ALLELES}); + } + + for ( final double[] pls : allPLs ) { + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AC, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AC, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AC, CC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AC, AC}); + + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, AG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, AG, AG}); + + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AA, ACG, AA}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AC, ACG, AC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CC, ACG, CC}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, AG, ACG, AG}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, CG, ACG, CG}); + tests.add(new Object[]{GATKVariantContextUtils.GenotypeAssignmentMethod.BEST_MATCH_TO_ORIGINAL, pls, GG, ACG, GG}); + } + + return tests.toArray(new Object[][]{}); + } + + @Test(enabled = !DEBUG, dataProvider = "UpdateGenotypeAfterSubsettingData") + public void testUpdateGenotypeAfterSubsetting(final GATKVariantContextUtils.GenotypeAssignmentMethod 
mode, + final double[] likelihoods, + final List originalGT, + final List allelesToUse, + final List expectedAlleles) { + final GenotypeBuilder gb = new GenotypeBuilder("test"); + final double[] log10Likelhoods = MathUtils.normalizeFromLog10(likelihoods, true, false); + GATKVariantContextUtils.updateGenotypeAfterSubsetting(originalGT, gb, mode, log10Likelhoods, allelesToUse); + final Genotype g = gb.make(); + Assert.assertEquals(new HashSet<>(g.getAlleles()), new HashSet<>(expectedAlleles)); + } + + @Test(enabled = !DEBUG) + public void testSubsetToRef() { + final Map tests = new LinkedHashMap<>(); + + for ( final List alleles : Arrays.asList(Arrays.asList(Aref), Arrays.asList(C), Arrays.asList(Aref, C), Arrays.asList(Aref, C, C) ) ) { + for ( final String name : Arrays.asList("test1", "test2") ) { + final GenotypeBuilder builder = new GenotypeBuilder(name, alleles); + builder.DP(10); + builder.GQ(30); + builder.AD(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? new int[]{1, 2} : new int[]{1, 2, 3})); + builder.PL(alleles.size() == 1 ? new int[]{1} : (alleles.size() == 2 ? 
new int[]{1,2} : new int[]{1,2,3})); + final List refs = Collections.nCopies(alleles.size(), Aref); + tests.put(builder.make(), builder.alleles(refs).noAD().noPL().make()); + } + } + + for ( final int n : Arrays.asList(1, 2, 3) ) { + for ( final List genotypes : Utils.makePermutations(new ArrayList<>(tests.keySet()), n, false) ) { + final VariantContext vc = new VariantContextBuilder("test", "20", 1, 1, Arrays.asList(Aref, C)).genotypes(genotypes).make(); + final GenotypesContext gc = GATKVariantContextUtils.subsetToRefOnly(vc, 2); + + Assert.assertEquals(gc.size(), genotypes.size()); + for ( int i = 0; i < genotypes.size(); i++ ) { +// logger.warn("Testing " + genotypes.get(i) + " => " + gc.get(i) + " " + tests.get(genotypes.get(i))); + assertGenotypesAreEqual(gc.get(i), tests.get(genotypes.get(i))); + } + } + } + } } \ No newline at end of file diff --git a/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala b/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala index 5d887016e..7c4c3f26a 100644 --- a/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala +++ b/public/scala/src/org/broadinstitute/sting/queue/extensions/picard/CollectGcBiasMetrics.scala @@ -52,6 +52,5 @@ class CollectGcBiasMetrics extends org.broadinstitute.sting.queue.function.JavaC override def commandLine = super.commandLine + required("SUMMARY_OUTPUT=" + output) + required("CHART_OUTPUT=" + output+".pdf") + - required("REFERENCE_SEQUENCE=" + reference) + - required("ASSUME_SORTED=true") + required("REFERENCE_SEQUENCE=" + reference) } diff --git a/public/testdata/exampleDBSNP.vcf.idx b/public/testdata/exampleDBSNP.vcf.idx new file mode 100644 index 000000000..7239e366f Binary files /dev/null and b/public/testdata/exampleDBSNP.vcf.idx differ