From b7d59ea13bfc1602a3152269853979d2d1f98d3a Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Mon, 1 Apr 2013 10:43:43 -0400 Subject: [PATCH 01/23] LIBS unit test debugging should be false --- .../utils/locusiterator/LocusIteratorByStateUnitTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java index fd87c1c12..d2f29ee7a 100644 --- a/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/locusiterator/LocusIteratorByStateUnitTest.java @@ -51,7 +51,7 @@ import java.util.*; * testing of the new (non-legacy) version of LocusIteratorByState */ public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest { - private static final boolean DEBUG = true; + private static final boolean DEBUG = false; protected LocusIteratorByState li; @Test(enabled = true) @@ -361,7 +361,7 @@ public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest { // comprehensive LIBS/PileupElement tests // //////////////////////////////////////////// - @DataProvider(name = "LIBSTest") + @DataProvider(name = "MyLIBSTest") public Object[][] makeLIBSTest() { final List tests = new LinkedList(); @@ -377,7 +377,7 @@ public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest { // Arrays.asList(3)); } - @Test(enabled = true && ! DEBUG, dataProvider = "LIBSTest") + @Test(enabled = ! 
DEBUG, dataProvider = "MyLIBSTest") public void testLIBS(LIBSTest params) { // create the iterator by state with the fake reads and fake records final GATKSAMRecord read = params.makeRead(); From bff13bb5c56035276646711b318ac479e30074cc Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 27 Mar 2013 08:34:28 -0400 Subject: [PATCH 02/23] Move Haplotype class to its own package in utils --- .../gatk/walkers/genotyper/ErrorModel.java | 2 +- ...GeneralPloidyIndelGenotypeLikelihoods.java | 2 +- ...elGenotypeLikelihoodsCalculationModel.java | 1 + ...elGenotypeLikelihoodsCalculationModel.java | 2 +- .../haplotypecaller/DeBruijnAssembler.java | 2 +- .../haplotypecaller/GenotypingEngine.java | 20 +++--- .../haplotypecaller/HaplotypeCaller.java | 1 + .../haplotypecaller/HaplotypeResolver.java | 2 +- .../LikelihoodCalculationEngine.java | 2 +- .../haplotypecaller/LocalAssemblyEngine.java | 2 +- .../indels/HaplotypeIndelErrorModel.java | 2 +- .../indels/PairHMMIndelErrorModel.java | 2 +- .../DeBruijnAssemblerUnitTest.java | 2 +- .../GenotypingEngineUnitTest.java | 1 + .../KMerErrorCorrectorUnitTest.java | 67 +++++++------------ .../LikelihoodCalculationEngineUnitTest.java | 4 -- .../utils/{ => haplotype}/Haplotype.java | 3 +- .../AllHaplotypeBAMWriter.java | 5 +- .../CalledHaplotypeBAMWriter.java | 2 +- .../HaplotypeBAMWriter.java | 2 +- .../{ => haplotype}/HaplotypeUnitTest.java | 4 +- .../HaplotypeBAMWriterUnitTest.java | 2 +- 22 files changed, 54 insertions(+), 78 deletions(-) rename public/java/src/org/broadinstitute/sting/utils/{ => haplotype}/Haplotype.java (99%) rename public/java/test/org/broadinstitute/sting/utils/{ => haplotype}/HaplotypeUnitTest.java (98%) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java index 49494ebb0..7ce736b0c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/ErrorModel.java @@ -49,7 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.genotyper; import com.google.java.contract.Requires; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java index c957bb9db..2f2a93fa4 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoods.java @@ -49,7 +49,7 @@ package org.broadinstitute.sting.gatk.walkers.genotyper; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.walkers.genotyper.afcalc.ExactACset; import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.pileup.PileupElement; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoodsCalculationModel.java index 
bd25fb6c5..9c4694955 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/GeneralPloidyIndelGenotypeLikelihoodsCalculationModel.java @@ -53,6 +53,7 @@ import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; import org.broadinstitute.sting.utils.*; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.variant.variantcontext.*; import java.util.*; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java index 858a3370b..8a766ba48 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/genotyper/IndelGenotypeLikelihoodsCalculationModel.java @@ -55,7 +55,7 @@ import org.broadinstitute.sting.gatk.walkers.indels.PairHMMIndelErrorModel; import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 198abeac8..9bc0713c0 100644 
--- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -55,7 +55,7 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.SWPairwiseAlignment; import org.broadinstitute.sting.utils.activeregion.ActiveRegion; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index ee9993b4f..34d81d405 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -58,9 +58,9 @@ import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; -import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.variant.variantcontext.*; import java.io.PrintStream; @@ -697,15 +697,6 @@ public class GenotypingEngine { return eventAllelesForSample; } - protected static boolean containsVCWithMatchingAlleles( final List list, final VariantContext vcToTest ) { - for( final VariantContext vc : list ) { - if( 
vc.hasSameAllelesAs(vcToTest) ) { - return true; - } - } - return false; - } - protected static Map generateVCsFromAlignment( final Haplotype haplotype, final int alignmentStartHapwrtRef, final Cigar cigar, final byte[] ref, final byte[] alignment, final GenomeLoc refLoc, final String sourceNameToAdd ) { final Map vcs = new LinkedHashMap(); @@ -794,6 +785,15 @@ public class GenotypingEngine { return vcs; } + protected static boolean containsVCWithMatchingAlleles( final List list, final VariantContext vcToTest ) { + for( final VariantContext vc : list ) { + if( vc.hasSameAllelesAs(vcToTest) ) { + return true; + } + } + return false; + } + protected static class Event { public VariantContext vc; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index da077ff02..d77caa2a2 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -77,6 +77,7 @@ import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.fragments.FragmentCollection; import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; import org.broadinstitute.sting.utils.help.HelpConstants; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java index facc929cd..03af9b59b 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java @@ -58,7 +58,7 @@ import org.broadinstitute.sting.gatk.walkers.Reference; import org.broadinstitute.sting.gatk.walkers.RodWalker; import org.broadinstitute.sting.gatk.walkers.Window; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.SWPairwiseAlignment; import org.broadinstitute.sting.utils.help.HelpConstants; import org.broadinstitute.variant.vcf.VCFHeader; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index 4ea2498c4..df1c9aabc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -50,7 +50,7 @@ import com.google.java.contract.Ensures; import com.google.java.contract.Requires; import org.apache.log4j.Logger; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java index c31405872..23cbc3265 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LocalAssemblyEngine.java @@ -47,7 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.activeregion.ActiveRegion; import org.broadinstitute.variant.variantcontext.VariantContext; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/HaplotypeIndelErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/HaplotypeIndelErrorModel.java index f7686bdf5..cd4ea778d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/HaplotypeIndelErrorModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/HaplotypeIndelErrorModel.java @@ -47,7 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.indels; import net.sf.samtools.SAMRecord; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.QualityUtils; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java index 4c5490395..a1ce5afdb 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/indels/PairHMMIndelErrorModel.java @@ -48,7 +48,7 @@ package org.broadinstitute.sting.gatk.walkers.indels; import com.google.java.contract.Ensures; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; 
-import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.clipping.ReadClipper; import org.broadinstitute.sting.utils.exceptions.UserException; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java index 86d331dae..59d13dee4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssemblerUnitTest.java @@ -57,7 +57,7 @@ import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.DeBruijnGraph; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java index 8b09e91ae..2be42337d 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java @@ -56,6 +56,7 @@ import net.sf.picard.reference.ReferenceSequenceFile; import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import 
org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.variantcontext.VariantContextBuilder; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java index a4edfcacc..f8a540b70 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java @@ -1,48 +1,27 @@ /* -* By downloading the PROGRAM you agree to the following terms of use: -* -* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* -* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* -* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and -* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. -* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* -* 1. DEFINITIONS -* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* -* 2. LICENSE -* 2.1 Grant. 
Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. -* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. -* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY -* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. 
LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. -* Copyright 2012 Broad Institute, Inc. -* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. -* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* -* 4. INDEMNIFICATION -* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* -* 5. NO REPRESENTATIONS OR WARRANTIES -* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
-* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* -* 6. ASSIGNMENT -* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* -* 7. MISCELLANEOUS -* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. -* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. -* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. -* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. -* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
-*/ + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngineUnitTest.java index 58f9a2e74..48c9d3c1a 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngineUnitTest.java @@ -53,14 +53,10 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; */ import org.broadinstitute.sting.BaseTest; -import org.broadinstitute.sting.utils.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.testng.Assert; -import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.util.*; - /** * Unit tests for LikelihoodCalculationEngine */ diff --git a/public/java/src/org/broadinstitute/sting/utils/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java similarity index 99% rename from public/java/src/org/broadinstitute/sting/utils/Haplotype.java rename to public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index 070ae4f5d..6dc223616 100644 --- a/public/java/src/org/broadinstitute/sting/utils/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -23,7 +23,7 @@ * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ -package org.broadinstitute.sting.utils; +package org.broadinstitute.sting.utils.haplotype; import com.google.java.contract.Requires; import net.sf.samtools.Cigar; @@ -31,6 +31,7 @@ import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import org.apache.commons.lang.ArrayUtils; import org.broadinstitute.sting.gatk.contexts.ReferenceContext; +import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.ReadUtils; diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java b/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java index f6fa44ac5..9936bd9ab 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java @@ -26,11 +26,8 @@ package org.broadinstitute.sting.utils.haplotypeBAMWriter; import net.sf.samtools.*; -import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.Haplotype; -import org.broadinstitute.sting.utils.SWPairwiseAlignment; -import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java b/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java index aae00c3ea..08b4fff7c 100644 --- 
a/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java @@ -27,7 +27,7 @@ package org.broadinstitute.sting.utils.haplotypeBAMWriter; import net.sf.samtools.SAMFileWriter; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.genotyper.MostLikelyAllele; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java b/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java index c0d3b38fa..c80287bca 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java @@ -28,7 +28,7 @@ package org.broadinstitute.sting.utils.haplotypeBAMWriter; import net.sf.samtools.*; import org.broadinstitute.sting.gatk.io.StingSAMFileWriter; import org.broadinstitute.sting.utils.GenomeLoc; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.SWPairwiseAlignment; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; diff --git a/public/java/test/org/broadinstitute/sting/utils/HaplotypeUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java similarity index 98% rename from public/java/test/org/broadinstitute/sting/utils/HaplotypeUnitTest.java rename to public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java index 0e4ec2b63..fe02aea9f 100644 --- 
a/public/java/test/org/broadinstitute/sting/utils/HaplotypeUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java @@ -23,15 +23,15 @@ * THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ -package org.broadinstitute.sting.utils; +package org.broadinstitute.sting.utils.haplotype; -import net.sf.picard.util.CigarUtil; import net.sf.samtools.Cigar; import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.variantcontext.VariantContextBuilder; diff --git a/public/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java index 43969c7a0..89d87a3c3 100644 --- a/public/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/haplotypeBAMWriter/HaplotypeBAMWriterUnitTest.java @@ -27,7 +27,7 @@ package org.broadinstitute.sting.utils.haplotypeBAMWriter; import net.sf.samtools.*; import org.broadinstitute.sting.BaseTest; -import org.broadinstitute.sting.utils.Haplotype; +import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.SWPairwiseAlignment; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.sam.AlignmentUtils; From 0310499b656f4a6bc41b46c64abaabd5e473d984 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 27 Mar 2013 09:36:14 -0400 Subject: [PATCH 03/23] System to merge multiple nearby alleles into block substitutions -- Block substitution algorithm that merges nearby events based on distance. 
-- Also does some cleanup of GenotypingEngine --- .../haplotypecaller/DeBruijnAssembler.java | 2 +- .../haplotypecaller/GenotypingEngine.java | 339 +++++++----------- .../haplotypecaller/HaplotypeResolver.java | 4 +- .../GenotypingEngineUnitTest.java | 3 +- .../sting/utils/haplotype/EventExtractor.java | 307 ++++++++++++++++ .../sting/utils/haplotype/Haplotype.java | 12 +- .../sting/utils/sam/AlignmentUtils.java | 61 ++++ .../variant/GATKVariantContextUtils.java | 17 + .../haplotype/EventExtractorUnitTest.java | 171 +++++++++ 9 files changed, 707 insertions(+), 209 deletions(-) create mode 100644 public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java create mode 100644 public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 9bc0713c0..1fd2b9c00 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -432,7 +432,7 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { // for GGA mode, add the desired allele into the haplotype if it isn't already present if( !activeAllelesToGenotype.isEmpty() ) { - final Map eventMap = GenotypingEngine.generateVCsFromAlignment( h, h.getAlignmentStartHapwrtRef(), h.getCigar(), refWithPadding, h.getBases(), refLoc, "HCassembly" ); // BUGBUG: need to put this function in a shared place + final Map eventMap = GenotypingEngine.generateVCsFromAlignment( h, refWithPadding, refLoc, "HCassembly" ); // BUGBUG: need to put this function in a shared place for( final VariantContext compVC : activeAllelesToGenotype ) { // for GGA mode, add the desired allele into the haplotype if it isn't already present final 
VariantContext vcOnHaplotype = eventMap.get(compVC.getStart()); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 34d81d405..8e76b6ea6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -58,6 +58,7 @@ import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; import org.broadinstitute.sting.utils.*; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.EventExtractor; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; @@ -72,7 +73,6 @@ public class GenotypingEngine { private final boolean DEBUG; private final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; private final static List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied - private final static Allele SYMBOLIC_UNASSEMBLED_EVENT_ALLELE = Allele.create("", false); private final VariantAnnotatorEngine annotationEngine; public GenotypingEngine( final boolean DEBUG, final VariantAnnotatorEngine annotationEngine, final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS ) { @@ -145,99 +145,26 @@ public class GenotypingEngine { final GenomeLocParser genomeLocParser, final List activeAllelesToGenotype ) { // sanity check input arguments - if (UG_engine == null) - throw new IllegalArgumentException("UG_Engine input can't be null, got "+UG_engine); - if (haplotypes == null || haplotypes.isEmpty()) - throw new IllegalArgumentException("haplotypes input should be non-empty 
and non-null, got "+haplotypes); - if (samples == null || samples.isEmpty()) - throw new IllegalArgumentException("samples input must be non-empty and non-null, got "+samples); - if (haplotypeReadMap == null || haplotypeReadMap.isEmpty()) - throw new IllegalArgumentException("haplotypeReadMap input should be non-empty and non-null, got "+haplotypeReadMap); - if (ref == null || ref.length == 0 ) - throw new IllegalArgumentException("ref bytes input should be non-empty and non-null, got "+ref); - if (refLoc == null || refLoc.getStop()-refLoc.getStart()+1 != ref.length) - throw new IllegalArgumentException(" refLoc must be non-null and length must match ref bytes, got "+refLoc); - if (activeRegionWindow == null ) - throw new IllegalArgumentException("activeRegionWindow must be non-null, got "+activeRegionWindow); - if (activeAllelesToGenotype == null ) - throw new IllegalArgumentException("activeAllelesToGenotype must be non-null, got "+activeAllelesToGenotype); - if (genomeLocParser == null ) - throw new IllegalArgumentException("genomeLocParser must be non-null, got "+genomeLocParser); + if (UG_engine == null) throw new IllegalArgumentException("UG_Engine input can't be null, got "+UG_engine); + if (haplotypes == null || haplotypes.isEmpty()) throw new IllegalArgumentException("haplotypes input should be non-empty and non-null, got "+haplotypes); + if (samples == null || samples.isEmpty()) throw new IllegalArgumentException("samples input must be non-empty and non-null, got "+samples); + if (haplotypeReadMap == null || haplotypeReadMap.isEmpty()) throw new IllegalArgumentException("haplotypeReadMap input should be non-empty and non-null, got "+haplotypeReadMap); + if (ref == null || ref.length == 0 ) throw new IllegalArgumentException("ref bytes input should be non-empty and non-null, got "+ref); + if (refLoc == null || refLoc.getStop()-refLoc.getStart()+1 != ref.length) throw new IllegalArgumentException(" refLoc must be non-null and length must match ref bytes, 
got "+refLoc); + if (activeRegionWindow == null ) throw new IllegalArgumentException("activeRegionWindow must be non-null, got "+activeRegionWindow); + if (activeAllelesToGenotype == null ) throw new IllegalArgumentException("activeAllelesToGenotype must be non-null, got "+activeAllelesToGenotype); + if (genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser must be non-null, got "+genomeLocParser); - final List returnCalls = new ArrayList(); - final boolean in_GGA_mode = !activeAllelesToGenotype.isEmpty(); - - // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file - final TreeSet startPosKeySet = new TreeSet(); - int count = 0; - if( DEBUG ) { logger.info("=== Best Haplotypes ==="); } - for( final Haplotype h : haplotypes ) { - // Walk along the alignment and turn any difference from the reference into an event - h.setEventMap( generateVCsFromAlignment( h, h.getAlignmentStartHapwrtRef(), h.getCigar(), ref, h.getBases(), refLoc, "HC" + count++ ) ); - if( !in_GGA_mode ) { startPosKeySet.addAll(h.getEventMap().keySet()); } - if( DEBUG ) { - logger.info(h.toString()); - logger.info("> Cigar = " + h.getCigar()); - logger.info(">> Events = " + h.getEventMap()); - } - } - - cleanUpSymbolicUnassembledEvents( haplotypes ); - if( !in_GGA_mode && samples.size() >= 10 ) { // if not in GGA mode and have at least 10 samples try to create MNP and complex events by looking at LD structure - mergeConsecutiveEventsBasedOnLD( haplotypes, samples, haplotypeReadMap, startPosKeySet, ref, refLoc ); - cleanUpSymbolicUnassembledEvents( haplotypes ); // the newly created merged events could be overlapping the unassembled events - } - if( in_GGA_mode ) { - for( final VariantContext compVC : activeAllelesToGenotype ) { - startPosKeySet.add( compVC.getStart() ); - } - } - - final Set calledHaplotypes = new HashSet(); + // update the haplotypes so we're ready to call, getting the ordered list of positions on the 
reference + // that carry events among the haplotypes + final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, samples, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); // Walk along each position in the key set and create each event to be outputted + final Set calledHaplotypes = new HashSet(); + final List returnCalls = new ArrayList(); for( final int loc : startPosKeySet ) { if( loc >= activeRegionWindow.getStart() && loc <= activeRegionWindow.getStop() ) { // genotyping an event inside this active region - final List eventsAtThisLoc = new ArrayList(); // the overlapping events to merge into a common reference view - final List priorityList = new ArrayList(); // used to merge overlapping events into common reference view - - if( !in_GGA_mode ) { - for( final Haplotype h : haplotypes ) { - final Map eventMap = h.getEventMap(); - final VariantContext vc = eventMap.get(loc); - if( vc != null && !containsVCWithMatchingAlleles(eventsAtThisLoc, vc) ) { - eventsAtThisLoc.add(vc); - priorityList.add(vc.getSource()); - } - } - } else { // we are in GGA mode! 
- int compCount = 0; - for( final VariantContext compVC : activeAllelesToGenotype ) { - if( compVC.getStart() == loc ) { - int alleleCount = 0; - for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { - List alleleSet = new ArrayList(2); - alleleSet.add(compVC.getReference()); - alleleSet.add(compAltAllele); - final String vcSourceName = "Comp" + compCount + "Allele" + alleleCount; - // check if this event is already in the list of events due to a repeat in the input alleles track - final VariantContext candidateEventToAdd = new VariantContextBuilder(compVC).alleles(alleleSet).source(vcSourceName).make(); - boolean alreadyExists = false; - for( final VariantContext eventToTest : eventsAtThisLoc ) { - if( eventToTest.hasSameAllelesAs(candidateEventToAdd) ) { - alreadyExists = true; - } - } - if( !alreadyExists ) { - priorityList.add(vcSourceName); - eventsAtThisLoc.add(candidateEventToAdd); - } - alleleCount++; - } - } - compCount++; - } - } + final List eventsAtThisLoc = getVCsAtThisLocation(haplotypes, loc, activeAllelesToGenotype); if( eventsAtThisLoc.isEmpty() ) { continue; } @@ -245,7 +172,7 @@ public class GenotypingEngine { final Map> eventMapper = createEventMapper(loc, eventsAtThisLoc, haplotypes); // Sanity check the priority list for mistakes - validatePriorityList( priorityList, eventsAtThisLoc ); + final List priorityList = makePriorityList(eventsAtThisLoc); // Merge the event to find a common reference representation final VariantContext mergedVC = GATKVariantContextUtils.simpleMerge(eventsAtThisLoc, priorityList, GATKVariantContextUtils.FilteredRecordMergeType.KEEP_IF_ANY_UNFILTERED, GATKVariantContextUtils.GenotypeMergeType.PRIORITIZE, false, false, null, false, false); @@ -264,7 +191,6 @@ public class GenotypingEngine { if( DEBUG ) { logger.info("Genotyping event at " + loc + " with alleles = " + mergedVC.getAlleles()); - //System.out.println("Event/haplotype allele mapping = " + alleleMapper); } final Map alleleReadMap = 
convertHaplotypeReadMapToAlleleReadMap( haplotypeReadMap, alleleMapper, UG_engine.getUAC().CONTAMINATION_FRACTION, UG_engine.getUAC().contaminationLog ); @@ -277,7 +203,6 @@ public class GenotypingEngine { final Map stratifiedReadMap = filterToOnlyOverlappingReads( genomeLocParser, alleleReadMap_annotations, perSampleFilteredReadList, call ); VariantContext annotatedCall = call; - // TODO -- should be before annotated call, so that QDL works correctly if( annotatedCall.getAlleles().size() != mergedVC.getAlleles().size() ) { // some alleles were removed so reverseTrimming might be necessary! annotatedCall = GATKVariantContextUtils.reverseTrimAlleles(annotatedCall); } @@ -295,6 +220,117 @@ public class GenotypingEngine { return new CalledHaplotypes(returnCalls, calledHaplotypes); } + /** + * Go through the haplotypes we assembled, and decompose them into their constituent variant contexts + * + * @param haplotypes the list of haplotypes we're working with + * @param samples the samples we're working with + * @param haplotypeReadMap map from samples -> the per read allele likelihoods + * @param ref the reference bases (over the same interval as the haplotypes) + * @param refLoc the span of the reference bases + * @param activeAllelesToGenotype alleles we want to ensure are scheduled for genotyping (GGA mode) + * @return + */ + private TreeSet decomposeHaplotypesIntoVariantContexts(final List haplotypes, + final List samples, + final Map haplotypeReadMap, + final byte[] ref, + final GenomeLoc refLoc, + final List activeAllelesToGenotype) { + final boolean in_GGA_mode = !activeAllelesToGenotype.isEmpty(); + int hapNumber = 0; + + // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file + final TreeSet startPosKeySet = new TreeSet(); + + if( DEBUG ) logger.info("=== Best Haplotypes ==="); + for( final Haplotype h : haplotypes ) { + // Walk along the alignment and turn any difference from the reference into an event + 
h.setEventMap( new EventExtractor( h, ref, refLoc, "HC" + hapNumber++ ) ); + if( ! in_GGA_mode ) { + startPosKeySet.addAll(h.getEventMap().getStartPositions()); + } + + if( DEBUG ) { + logger.info(h.toString()); + logger.info("> Cigar = " + h.getCigar()); + logger.info(">> Events = " + h.getEventMap()); + } + } + + cleanUpSymbolicUnassembledEvents( haplotypes ); + if ( !in_GGA_mode && samples.size() >= 10 ) { + // if not in GGA mode and have at least 10 samples try to create MNP and complex events by looking at LD structure + mergeConsecutiveEventsBasedOnLD( haplotypes, samples, haplotypeReadMap, startPosKeySet, ref, refLoc ); + cleanUpSymbolicUnassembledEvents( haplotypes ); // the newly created merged events could be overlapping the unassembled events + } + + if ( in_GGA_mode ) { + for( final VariantContext compVC : activeAllelesToGenotype ) { + startPosKeySet.add( compVC.getStart() ); + } + } + + return startPosKeySet; + } + + /** + * Get the priority list (just the list of sources for these variant context) used to merge overlapping events into common reference view + * @param vcs a list of variant contexts + * @return the list of the sources of vcs in the same order + */ + private List makePriorityList(final List vcs) { + final List priorityList = new LinkedList(); + for ( final VariantContext vc : vcs ) priorityList.add(vc.getSource()); + + return priorityList; + } + + private List getVCsAtThisLocation(final List haplotypes, + final int loc, + final List activeAllelesToGenotype) { + // the overlapping events to merge into a common reference view + final List eventsAtThisLoc = new ArrayList(); + + if( activeAllelesToGenotype.isEmpty() ) { + for( final Haplotype h : haplotypes ) { + final EventExtractor eventMap = h.getEventMap(); + final VariantContext vc = eventMap.get(loc); + if( vc != null && !containsVCWithMatchingAlleles(eventsAtThisLoc, vc) ) { + eventsAtThisLoc.add(vc); + } + } + } else { // we are in GGA mode! 
+ int compCount = 0; + for( final VariantContext compVC : activeAllelesToGenotype ) { + if( compVC.getStart() == loc ) { + int alleleCount = 0; + for( final Allele compAltAllele : compVC.getAlternateAlleles() ) { + List alleleSet = new ArrayList(2); + alleleSet.add(compVC.getReference()); + alleleSet.add(compAltAllele); + final String vcSourceName = "Comp" + compCount + "Allele" + alleleCount; + // check if this event is already in the list of events due to a repeat in the input alleles track + final VariantContext candidateEventToAdd = new VariantContextBuilder(compVC).alleles(alleleSet).source(vcSourceName).make(); + boolean alreadyExists = false; + for( final VariantContext eventToTest : eventsAtThisLoc ) { + if( eventToTest.hasSameAllelesAs(candidateEventToAdd) ) { + alreadyExists = true; + } + } + if( !alreadyExists ) { + eventsAtThisLoc.add(candidateEventToAdd); + } + alleleCount++; + } + } + compCount++; + } + } + + return eventsAtThisLoc; + } + /** * For a particular event described in inputVC, form PL vector for each sample by looking into allele read map and filling likelihood matrix for each allele * @param samples List of samples to genotype @@ -322,23 +358,6 @@ public class GenotypingEngine { return genotypes; } - private void validatePriorityList( final List priorityList, final List eventsAtThisLoc ) { - for( final VariantContext vc : eventsAtThisLoc ) { - if( !priorityList.contains(vc.getSource()) ) { - throw new ReviewedStingException("Event found on haplotype that wasn't added to priority list. Something went wrong in the merging of alleles."); - } - } - for( final String name : priorityList ) { - boolean found = false; - for( final VariantContext vc : eventsAtThisLoc ) { - if(vc.getSource().equals(name)) { found = true; break; } - } - if( !found ) { - throw new ReviewedStingException("Event added to priority list but wasn't found on any haplotype. 
Something went wrong in the merging of alleles."); - } - } - } - private static Map filterToOnlyOverlappingReads( final GenomeLocParser parser, final Map perSampleReadMap, final Map> perSampleFilteredReadList, @@ -382,10 +401,10 @@ public class GenotypingEngine { protected static void cleanUpSymbolicUnassembledEvents( final List haplotypes ) { final List haplotypesToRemove = new ArrayList(); for( final Haplotype h : haplotypes ) { - for( final VariantContext vc : h.getEventMap().values() ) { + for( final VariantContext vc : h.getEventMap().getVariantContexts() ) { if( vc.isSymbolic() ) { for( final Haplotype h2 : haplotypes ) { - for( final VariantContext vc2 : h2.getEventMap().values() ) { + for( final VariantContext vc2 : h2.getEventMap().getVariantContexts() ) { if( vc.getStart() == vc2.getStart() && (vc2.isIndel() || vc2.isMNP()) ) { // unfortunately symbolic alleles can't currently be combined with non-point events haplotypesToRemove.add(h); break; @@ -512,11 +531,10 @@ public class GenotypingEngine { // remove the old event from the eventMap on every haplotype and the start pos key set, replace with merged event for( final Haplotype h : haplotypes ) { - final Map eventMap = h.getEventMap(); - if( eventMap.containsKey(thisStart) && eventMap.containsKey(nextStart) ) { - eventMap.remove(thisStart); - eventMap.remove(nextStart); - eventMap.put(mergedVC.getStart(), mergedVC); + if( h.getEventMap().containsKey(thisStart) && h.getEventMap().containsKey(nextStart) ) { + h.getEventMap().remove(thisStart); + h.getEventMap().remove(nextStart); + h.getEventMap().put(mergedVC.getStart(), mergedVC); } } startPosKeySet.add(mergedVC.getStart()); @@ -697,92 +715,9 @@ public class GenotypingEngine { return eventAllelesForSample; } - protected static Map generateVCsFromAlignment( final Haplotype haplotype, final int alignmentStartHapwrtRef, final Cigar cigar, final byte[] ref, final byte[] alignment, final GenomeLoc refLoc, final String sourceNameToAdd ) { - final Map vcs = new 
LinkedHashMap(); - - int refPos = alignmentStartHapwrtRef; - if( refPos < 0 ) { return null; } // Protection against SW failures - int alignmentPos = 0; - - for( int cigarIndex = 0; cigarIndex < cigar.numCigarElements(); cigarIndex++ ) { - final CigarElement ce = cigar.getCigarElement(cigarIndex); - final int elementLength = ce.getLength(); - switch( ce.getOperator() ) { - case I: - { - if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig - final List insertionAlleles = new ArrayList(); - final int insertionStart = refLoc.getStart() + refPos - 1; - final byte refByte = ref[refPos-1]; - if( BaseUtils.isRegularBase(refByte) ) { - insertionAlleles.add( Allele.create(refByte, true) ); - } - if( cigarIndex == 0 || cigarIndex == cigar.getCigarElements().size() - 1 ) { // if the insertion isn't completely resolved in the haplotype then make it a symbolic allele - insertionAlleles.add( SYMBOLIC_UNASSEMBLED_EVENT_ALLELE ); - } else { - byte[] insertionBases = new byte[]{}; - insertionBases = ArrayUtils.add(insertionBases, ref[refPos-1]); // add the padding base - insertionBases = ArrayUtils.addAll(insertionBases, Arrays.copyOfRange( alignment, alignmentPos, alignmentPos + elementLength )); - if( BaseUtils.isAllRegularBases(insertionBases) ) { - insertionAlleles.add( Allele.create(insertionBases, false) ); - } - } - if( insertionAlleles.size() == 2 ) { // found a proper ref and alt allele - vcs.put(insertionStart, new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), insertionStart, insertionStart, insertionAlleles).make()); - } - } - alignmentPos += elementLength; - break; - } - case S: - { - alignmentPos += elementLength; - break; - } - case D: - { - if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig - final byte[] deletionBases = Arrays.copyOfRange( ref, refPos - 1, refPos + elementLength ); // add padding base - final List deletionAlleles = new ArrayList(); 
- final int deletionStart = refLoc.getStart() + refPos - 1; - final byte refByte = ref[refPos-1]; - if( BaseUtils.isRegularBase(refByte) && BaseUtils.isAllRegularBases(deletionBases) ) { - deletionAlleles.add( Allele.create(deletionBases, true) ); - deletionAlleles.add( Allele.create(refByte, false) ); - vcs.put(deletionStart, new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), deletionStart, deletionStart + elementLength, deletionAlleles).make()); - } - } - refPos += elementLength; - break; - } - case M: - case EQ: - case X: - { - for( int iii = 0; iii < elementLength; iii++ ) { - final byte refByte = ref[refPos]; - final byte altByte = alignment[alignmentPos]; - if( refByte != altByte ) { // SNP! - if( BaseUtils.isRegularBase(refByte) && BaseUtils.isRegularBase(altByte) ) { - final List snpAlleles = new ArrayList(); - snpAlleles.add( Allele.create( refByte, true ) ); - snpAlleles.add( Allele.create( altByte, false ) ); - vcs.put(refLoc.getStart() + refPos, new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), refLoc.getStart() + refPos, refLoc.getStart() + refPos, snpAlleles).make()); - } - } - refPos++; - alignmentPos++; - } - break; - } - case N: - case H: - case P: - default: - throw new ReviewedStingException( "Unsupported cigar operator created during SW alignment: " + ce.getOperator() ); - } - } - return vcs; + @Deprecated + protected static Map generateVCsFromAlignment( final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd ) { + return new EventExtractor(haplotype, ref, refLoc, sourceNameToAdd); } protected static boolean containsVCWithMatchingAlleles( final List list, final VariantContext vcToTest ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java index 03af9b59b..134863b8b 100644 --- 
a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeResolver.java @@ -360,8 +360,8 @@ public class HaplotypeResolver extends RodWalker { } // order results by start position - final TreeMap source1Map = new TreeMap(GenotypingEngine.generateVCsFromAlignment(new Haplotype(source1Haplotype), 0, swConsensus1.getCigar(), refContext.getBases(), source1Haplotype, refContext.getWindow(), source1)); - final TreeMap source2Map = new TreeMap(GenotypingEngine.generateVCsFromAlignment(new Haplotype(source2Haplotype), 0, swConsensus2.getCigar(), refContext.getBases(), source2Haplotype, refContext.getWindow(), source2)); + final TreeMap source1Map = new TreeMap(GenotypingEngine.generateVCsFromAlignment(new Haplotype(source1Haplotype, false, 0, swConsensus1.getCigar()), refContext.getBases(), refContext.getWindow(), source1)); + final TreeMap source2Map = new TreeMap(GenotypingEngine.generateVCsFromAlignment(new Haplotype(source2Haplotype, false, 0, swConsensus2.getCigar()), refContext.getBases(), refContext.getWindow(), source2)); if ( source1Map.size() == 0 || source2Map.size() == 0 ) { // TODO -- handle errors appropriately logger.debug("No source alleles; aborting at " + refContext.getLocus()); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java index 2be42337d..9fb75463a 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java @@ -199,7 +199,8 @@ public class GenotypingEngineUnitTest extends BaseTest { public Map calcAlignment() { final SWPairwiseAlignment alignment = new SWPairwiseAlignment(ref, hap); - 
return GenotypingEngine.generateVCsFromAlignment( new Haplotype(hap), alignment.getAlignmentStart2wrt1(), alignment.getCigar(), ref, hap, genomeLocParser.createGenomeLoc("4",1,1+ref.length), "name"); + final Haplotype h = new Haplotype(hap, false, alignment.getAlignmentStart2wrt1(), alignment.getCigar()); + return GenotypingEngine.generateVCsFromAlignment( h, ref, genomeLocParser.createGenomeLoc("4",1,1+ref.length), "name"); } } diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java new file mode 100644 index 000000000..c32cde641 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java @@ -0,0 +1,307 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ + +package org.broadinstitute.sting.utils.haplotype; + +import com.google.java.contract.Ensures; +import com.google.java.contract.Requires; +import net.sf.samtools.Cigar; +import net.sf.samtools.CigarElement; +import org.apache.commons.lang.ArrayUtils; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.BaseUtils; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; +import org.broadinstitute.sting.utils.sam.AlignmentUtils; +import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; + +import java.util.*; + +/** + * Extract simple VariantContext events from a single haplotype + * + * User: depristo + * Date: 3/27/13 + * Time: 8:35 AM + */ +public class EventExtractor extends TreeMap { + private final static Logger logger = Logger.getLogger(EventExtractor.class); + private final static boolean mergeClumpedEvents = true; + protected final static int MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION = 3; + public final static Allele SYMBOLIC_UNASSEMBLED_EVENT_ALLELE = Allele.create("", false); + + public EventExtractor( final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd ) { + super(); + + processCigarForInitialEvents(haplotype, ref, refLoc, sourceNameToAdd); + if ( mergeClumpedEvents && getNumberOfEvents() >= MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) { + replaceClumpedEventsWithBlockSubstititions(haplotype, ref, refLoc); + } + } + + /** + * For testing. Lets you set up an explicit configuration without having to process a haplotype and reference + * @param stateForTesting + */ + protected EventExtractor(final Map stateForTesting) { + super(stateForTesting); + } + + /** + * For testing. 
Lets you set up an explicit configuration without having to process a haplotype and reference + * @param stateForTesting + */ + protected EventExtractor(final Collection stateForTesting) { + for ( final VariantContext vc : stateForTesting ) + addVC(vc); + } + + protected void processCigarForInitialEvents(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd) { + final Cigar cigar = haplotype.getCigar(); + final byte[] alignment = haplotype.getBases(); + + int refPos = haplotype.getAlignmentStartHapwrtRef(); + if( refPos < 0 ) { + return; + } // Protection against SW failures + + int alignmentPos = 0; + + for( int cigarIndex = 0; cigarIndex < cigar.numCigarElements(); cigarIndex++ ) { + final CigarElement ce = cigar.getCigarElement(cigarIndex); + final int elementLength = ce.getLength(); + switch( ce.getOperator() ) { + case I: + { + if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig + final List insertionAlleles = new ArrayList(); + final int insertionStart = refLoc.getStart() + refPos - 1; + final byte refByte = ref[refPos-1]; + if( BaseUtils.isRegularBase(refByte) ) { + insertionAlleles.add( Allele.create(refByte, true) ); + } + if( cigarIndex == 0 || cigarIndex == cigar.getCigarElements().size() - 1 ) { // if the insertion isn't completely resolved in the haplotype then make it a symbolic allele + insertionAlleles.add( SYMBOLIC_UNASSEMBLED_EVENT_ALLELE ); + } else { + byte[] insertionBases = new byte[]{}; + insertionBases = ArrayUtils.add(insertionBases, ref[refPos - 1]); // add the padding base + insertionBases = ArrayUtils.addAll(insertionBases, Arrays.copyOfRange(alignment, alignmentPos, alignmentPos + elementLength)); + if( BaseUtils.isAllRegularBases(insertionBases) ) { + insertionAlleles.add( Allele.create(insertionBases, false) ); + } + } + if( insertionAlleles.size() == 2 ) { // found a proper ref and alt allele + addVC(new 
VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), insertionStart, insertionStart, insertionAlleles).make()); + } + } + alignmentPos += elementLength; + break; + } + case S: + { + alignmentPos += elementLength; + break; + } + case D: + { + if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig + final byte[] deletionBases = Arrays.copyOfRange( ref, refPos - 1, refPos + elementLength ); // add padding base + final List deletionAlleles = new ArrayList(); + final int deletionStart = refLoc.getStart() + refPos - 1; + final byte refByte = ref[refPos-1]; + if( BaseUtils.isRegularBase(refByte) && BaseUtils.isAllRegularBases(deletionBases) ) { + deletionAlleles.add( Allele.create(deletionBases, true) ); + deletionAlleles.add( Allele.create(refByte, false) ); + addVC(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), deletionStart, deletionStart + elementLength, deletionAlleles).make()); + } + } + refPos += elementLength; + break; + } + case M: + case EQ: + case X: + { + for( int iii = 0; iii < elementLength; iii++ ) { + final byte refByte = ref[refPos]; + final byte altByte = alignment[alignmentPos]; + if( refByte != altByte ) { // SNP! 
+ if( BaseUtils.isRegularBase(refByte) && BaseUtils.isRegularBase(altByte) ) { + final List snpAlleles = new ArrayList(); + snpAlleles.add( Allele.create( refByte, true ) ); + snpAlleles.add( Allele.create( altByte, false ) ); + addVC(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), refLoc.getStart() + refPos, refLoc.getStart() + refPos, snpAlleles).make()); + } + } + refPos++; + alignmentPos++; + } + break; + } + case N: + case H: + case P: + default: + throw new ReviewedStingException( "Unsupported cigar operator created during SW alignment: " + ce.getOperator() ); + } + } + } + + private void addVC(final VariantContext vc) { + addVC(vc, true); + } + + private void addVC(final VariantContext vc, final boolean merge) { + if ( containsKey(vc.getStart()) ) { + if ( merge ) { + final VariantContext prev = get(vc.getStart()); + put(vc.getStart(), makeBlock(prev, vc)); + } else { + throw new IllegalStateException("Will not merge previously bound variant contexts as merge is false at " + vc); + } + } else + put(vc.getStart(), vc); + } + + private VariantContext makeBlock(final VariantContext vc1, final VariantContext vc2) { + if ( ! vc1.isSNP() ) throw new IllegalArgumentException("vc1 must be a snp"); + + Allele ref, alt; + final VariantContextBuilder b = new VariantContextBuilder(vc1); + if ( vc1.getReference().equals(vc2.getReference()) ) { + // we've got an insertion, so we just update the alt to have the prev alt + ref = vc1.getReference(); + alt = Allele.create(vc1.getAlternateAllele(0).getDisplayString() + vc2.getAlternateAllele(0).getDisplayString().substring(1), false); + } else { + // we're dealing with a deletion, so we patch the ref + ref = vc2.getReference(); + alt = vc1.getAlternateAllele(0); + b.stop(vc2.getEnd()); + } + + return b.alleles(Arrays.asList(ref, alt)).make(); + } + + // TODO -- warning this is an O(N^3) algorithm because I'm just lazy. 
If it's valuable we need to reengineer it + @Requires("getNumberOfEvents() > 0") + protected void replaceClumpedEventsWithBlockSubstititions(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc) { + int lastStart = -1; + for ( boolean foundOne = true; foundOne; ) { + foundOne = false; + for ( final VariantContext vc : getVariantContexts() ) { + if ( vc.getStart() > lastStart ) { + lastStart = vc.getStart(); + final List neighborhood = getNeighborhood(vc, 10); + if ( updateToBlockSubstitutionIfBetter(neighborhood, haplotype, ref, refLoc) ) { + foundOne = true; + break; + } + } + } + } + } + + protected boolean updateToBlockSubstitutionIfBetter(final List neighbors, final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc) { + if (neighbors.size() < MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) + return false; + // TODO -- need more tests to decide if this is really so good + + final VariantContext first = neighbors.get(0); + final int refStartOffset = first.getStart() - refLoc.getStart(); + final int refEndOffset = neighbors.get(neighbors.size() - 1).getEnd() - refLoc.getStart(); + + final byte[] refBases = Arrays.copyOfRange(ref, refStartOffset, refEndOffset + 1); + final byte[] hapBases = AlignmentUtils.getBasesCoveringRefInterval(refStartOffset, refEndOffset, haplotype.getBases(), haplotype.getAlignmentStartHapwrtRef(), haplotype.getCigar()); + + final VariantContextBuilder builder = new VariantContextBuilder(first); + builder.stop(first.getStart() + refBases.length - 1); + builder.alleles(Arrays.asList(Allele.create(refBases, true), Allele.create(hapBases))); + final VariantContext block = builder.make(); + + // remove all merged events + for ( final VariantContext merged : neighbors ) { + if ( remove(merged.getStart()) == null ) + throw new IllegalArgumentException("Expected to remove variant context from the event map but remove said there wasn't any element there: " + merged); + } + + // note must be after we remove the 
previous events as the treeset only allows one key per start + logger.info("Transforming into block substitution at " + block); + addVC(block, false); + + return true; + } + + /** + * Get all of the variant contexts starting at leftMost that are within maxBP of each other + * + * @param leftMost the left most (smallest position) variant context that will start the neighborhood + * @param maxBPBetweenEvents the maximum distance in BP between the end of one event the start of the next + * to be included the the resulting list + * @return a list that contains at least one element (leftMost) + */ + @Requires({"leftMost != null", "maxBPBetweenEvents >= 0"}) + @Ensures({"result != null", "! result.isEmpty()"}) + protected List getNeighborhood(final VariantContext leftMost, final int maxBPBetweenEvents) { + final List neighbors = new LinkedList(); + + VariantContext left = leftMost; + for ( final VariantContext vc : getVariantContexts() ) { + if ( vc.getStart() < leftMost.getStart() ) + continue; + + if ( vc.getStart() - left.getEnd() < maxBPBetweenEvents ) { + // this vc is within max distance to the end of the left event, so accumulate it + neighbors.add(vc); + left = vc; + } + } + + return neighbors; + } + + public Set getStartPositions() { + return keySet(); + } + + public Collection getVariantContexts() { + return values(); + } + + public int getNumberOfEvents() { + return size(); + } + + @Override + public String toString() { + final StringBuilder b = new StringBuilder("EventExtractor{"); + for ( final VariantContext vc : getVariantContexts() ) + b.append(String.format("%s:%d-%d %s,", vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles())); + b.append("}"); + return b.toString(); + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index 6dc223616..2e95fb03a 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ 
b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -43,7 +43,7 @@ import java.util.*; public class Haplotype extends Allele { private GenomeLoc genomeLocation = null; - private Map eventMap = null; + private EventExtractor eventMap = null; private Cigar cigar; private int alignmentStartHapwrtRef; private Event artificialEvent = null; @@ -63,6 +63,12 @@ public class Haplotype extends Allele { this(bases, false); } + public Haplotype( final byte[] bases, final boolean isRef, final int alignmentStartHapwrtRef, final Cigar cigar) { + this(bases, isRef); + this.alignmentStartHapwrtRef = alignmentStartHapwrtRef; + this.cigar = cigar; + } + /** * Copy constructor. Note the ref state of the provided allele is ignored! * @@ -92,11 +98,11 @@ public class Haplotype extends Allele { return Arrays.hashCode(getBases()); } - public Map getEventMap() { + public EventExtractor getEventMap() { return eventMap; } - public void setEventMap( final Map eventMap ) { + public void setEventMap( final EventExtractor eventMap ) { this.eventMap = eventMap; } diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java index 58f70d4b6..9b25b00c6 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java @@ -48,6 +48,67 @@ public final class AlignmentUtils { // cannot be instantiated private AlignmentUtils() { } + /** + * Get the byte[] from bases that cover the reference interval refStart -> refEnd given the + * alignment of bases to the reference (basesToRefCigar) and the start offset of the bases on the reference + * + * refStart and refEnd are 0 based offsets that we want to obtain. In the client code, if the reference + * bases start at position X and you want Y -> Z, refStart should be Y - X and refEnd should be Z - X. 
+ * + * @param bases + * @param refStart + * @param refEnd + * @param basesStartOnRef where does the bases array start w.r.t. the reference start? For example, bases[0] of + * could be at refStart == 0 if basesStartOnRef == 0, but it could just as easily be at + * 10 (meaning bases doesn't fully span the reference), which would be indicated by basesStartOnRef == 10. + * It's not trivial to eliminate this parameter because it's tied up with the cigar + * @param basesToRefCigar the cigar that maps the bases to the reference genome + * @return a non-null byte[] + */ + public static byte[] getBasesCoveringRefInterval(final int refStart, final int refEnd, final byte[] bases, final int basesStartOnRef, final Cigar basesToRefCigar) { + if ( refStart < 0 || refEnd < refStart ) throw new IllegalArgumentException("Bad start " + refStart + " and/or stop " + refEnd); + if ( basesStartOnRef < 0 ) throw new IllegalArgumentException("BasesStartOnRef must be >= 0 but got " + basesStartOnRef); + if ( bases == null ) throw new IllegalArgumentException("Bases cannot be null"); + if ( basesToRefCigar == null ) throw new IllegalArgumentException("basesToRefCigar cannot be null"); + if ( bases.length != basesToRefCigar.getReadLength() ) throw new IllegalArgumentException("Mismatch in length between reference bases " + bases.length + " and cigar length " + basesToRefCigar); + + int refPos = basesStartOnRef; + int basesPos = 0; + + int basesStart = -1; + int basesStop = -1; + boolean done = false; + + for ( int iii = 0; ! 
done && iii < basesToRefCigar.numCigarElements(); iii++ ) { + final CigarElement ce = basesToRefCigar.getCigarElement(iii); + final int bInc, rInc; + switch ( ce.getOperator() ) { + case I: bInc = 1; rInc = 0; break; + case M: case X: case EQ: bInc = rInc = 1; break; + case D: bInc = 0; rInc = 1; break; + default: + throw new IllegalStateException("Unsupported operator " + ce); + } + + for ( int i = 0; i < ce.getLength(); i++ ) { + if ( refPos == refStart ) + basesStart = basesPos; + if ( refPos == refEnd ) { + basesStop = basesPos; + done = true; + break; + } + refPos += rInc; + basesPos += bInc; + } + } + + if ( basesStart == -1 || basesStop == -1 ) + throw new IllegalStateException("Never found start " + basesStart + " or stop " + basesStop + " given cigar " + basesToRefCigar); + + return Arrays.copyOfRange(bases, basesStart, basesStop + 1); + } + /** * Get the number of bases at which refSeq and readSeq differ, given their alignment * diff --git a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java index 0bd30c3a4..4565402b9 100644 --- a/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/variant/GATKVariantContextUtils.java @@ -1424,4 +1424,21 @@ public class GATKVariantContextUtils { return result; } + + /** + * Are vc1 and 2 equal including their position and alleles? 
+ * @param vc1 non-null VariantContext + * @param vc2 non-null VariantContext + * @return true if vc1 and vc2 are equal, false otherwise + */ + public static boolean equalSites(final VariantContext vc1, final VariantContext vc2) { + if ( vc1 == null ) throw new IllegalArgumentException("vc1 cannot be null"); + if ( vc2 == null ) throw new IllegalArgumentException("vc2 cannot be null"); + + if ( vc1.getStart() != vc2.getStart() ) return false; + if ( vc1.getEnd() != vc2.getEnd() ) return false; + if ( ! vc1.getChr().equals(vc2.getChr())) return false; + if ( ! vc1.getAlleles().equals(vc2.getAlleles()) ) return false; + return true; + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java new file mode 100644 index 000000000..480f82a46 --- /dev/null +++ b/public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java @@ -0,0 +1,171 @@ +/* + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. + */ + +package org.broadinstitute.sting.utils.haplotype; + +import net.sf.samtools.TextCigarCodec; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.UnvalidatingGenomeLoc; +import org.broadinstitute.sting.utils.Utils; +import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.testng.Assert; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.util.*; + +public class EventExtractorUnitTest extends BaseTest { + private final static String CHR = "20"; + private final static String NAME = "foo"; + + @DataProvider(name = "MyDataProvider") + public Object[][] makeMyDataProvider() { + List tests = new ArrayList(); + + final List SNP_ALLELES = Arrays.asList("A", "C"); + final List INS_ALLELES = Arrays.asList("A", "ACGTGA"); + final List DEL_ALLELES = Arrays.asList("ACGTA", "C"); + final List> allAlleles = Arrays.asList(SNP_ALLELES, INS_ALLELES, DEL_ALLELES); + for ( final int leftNotClump : Arrays.asList(-1, 3) ) { + for ( final int middleNotClump : Arrays.asList(-1, 10, 500) ) { + for ( final int rightNotClump : Arrays.asList(-1, 1000) ) { + for ( final int nClumped : Arrays.asList(3, 4) ) { + for ( final List> alleles : Utils.makePermutations(allAlleles, nClumped, true)) { + final List allVCS = new LinkedList(); + + if ( leftNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, leftNotClump, SNP_ALLELES)); + if ( middleNotClump != -1 
) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, middleNotClump, SNP_ALLELES)); + if ( rightNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, rightNotClump, SNP_ALLELES)); + + int clumpStart = 50; + final List vcs = new LinkedList(); + for ( final List myAlleles : alleles ) { + final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(NAME, CHR, clumpStart, myAlleles); + clumpStart = vc.getEnd() + 3; + vcs.add(vc); + } + + tests.add(new Object[]{new EventExtractor(new LinkedList(allVCS)), Collections.emptyList()}); + allVCS.addAll(vcs); + tests.add(new Object[]{new EventExtractor(allVCS), vcs}); + } + } + } + } + } + + return tests.toArray(new Object[][]{}); + } + + /** + * Example testng test using MyDataProvider + */ + @Test(dataProvider = "MyDataProvider", enabled = true) // TODO == reenable + public void testGetNeighborhood(final EventExtractor eventExtractor, final List expectedNeighbors) { + final VariantContext leftOfNeighors = expectedNeighbors.isEmpty() ? null : expectedNeighbors.get(0); + + for ( final VariantContext vc : eventExtractor.getVariantContexts() ) { + final List n = eventExtractor.getNeighborhood(vc, 5); + if ( leftOfNeighors == vc ) + Assert.assertEquals(n, expectedNeighbors); + else if ( ! 
expectedNeighbors.contains(vc) ) + Assert.assertEquals(n, Collections.singletonList(vc), "Should only contain the original vc but " + n); + } + } + + @DataProvider(name = "BlockSubstitutionsData") + public Object[][] makeBlockSubstitutionsData() { + List tests = new ArrayList(); + + for ( int size = EventExtractor.MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION; size < 10; size++ ) { + final String ref = Utils.dupString("A", size); + final String alt = Utils.dupString("C", size); + tests.add(new Object[]{ref, alt, size + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList(ref, alt))}); + } + + tests.add(new Object[]{"AAAAAA", "GAGAGA", "6M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAAA", "GAGAG"))}); + tests.add(new Object[]{"AAAAAA", "GAGAGG", "6M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAAAA", "GAGAGG"))}); + + for ( int len = 0; len < 10; len++ ) { + final String s = len == 0 ? "" : Utils.dupString("A", len); + tests.add(new Object[]{s + "AACCCCAA", s + "GAAG", len + 2 + "M4D2M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1 + len, Arrays.asList("AACCCCAA", "GAAG"))}); + tests.add(new Object[]{s + "AAAA", s + "GACCCCAG", len + 2 + "M4I2M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1 + len, Arrays.asList("AAAA", "GACCCCAG"))}); + + tests.add(new Object[]{"AACCCCAA" + s, "GAAG" + s, "2M4D" + (len + 2) + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AACCCCAA", "GAAG"))}); + tests.add(new Object[]{"AAAA" + s, "GACCCCAG" + s, "2M4I" + (len + 2) + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList("AAAA", "GACCCCAG"))}); + } + + return tests.toArray(new Object[][]{}); + } + + /** + * Example testng test using MyDataProvider + */ + @Test(dataProvider = "BlockSubstitutionsData") + public void testBlockSubstitutionsData(final String refBases, final String haplotypeBases, final String cigar, final VariantContext 
expectedBlock) { + final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length()); + final EventExtractor ee = new EventExtractor(hap, refBases.getBytes(), loc, NAME); + Assert.assertEquals(ee.getNumberOfEvents(), 1); + final VariantContext actual = ee.getVariantContexts().iterator().next(); + Assert.assertTrue(GATKVariantContextUtils.equalSites(actual, expectedBlock), "Failed with " + actual); + } + + @DataProvider(name = "AdjacentSNPIndelTest") + public Object[][] makeAdjacentSNPIndelTest() { + List tests = new ArrayList(); + + tests.add(new Object[]{"TT", "GCT", "1M1I1M", Arrays.asList(Arrays.asList("T", "GC"))}); + tests.add(new Object[]{"GCT", "TT", "1M1D", Arrays.asList(Arrays.asList("GC", "T"))}); + tests.add(new Object[]{"TT", "GCCT", "1M2I1M", Arrays.asList(Arrays.asList("T", "GCC"))}); + tests.add(new Object[]{"GCCT", "TT", "1M2D", Arrays.asList(Arrays.asList("GCC", "T"))}); + tests.add(new Object[]{"AAGCCT", "AATT", "3M2D", Arrays.asList(Arrays.asList("GCC", "T"))}); + tests.add(new Object[]{"AAGCCT", "GATT", "3M2D", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("GCC", "T"))}); + tests.add(new Object[]{"AAAAA", "AGACA", "5M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("A", "C"))}); + + return tests.toArray(new Object[][]{}); + } + + /** + * Example testng test using MyDataProvider + */ + @Test(dataProvider = "AdjacentSNPIndelTest", enabled = true) + public void testAdjacentSNPIndelTest(final String refBases, final String haplotypeBases, final String cigar, final List> expectedAlleles) { + final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length()); + final EventExtractor ee = new EventExtractor(hap, refBases.getBytes(), loc, NAME); + 
Assert.assertEquals(ee.getNumberOfEvents(), expectedAlleles.size()); + final List actuals = new ArrayList(ee.getVariantContexts()); + for ( int i = 0; i < ee.getNumberOfEvents(); i++ ) { + final VariantContext actual = actuals.get(i); + Assert.assertEquals(actual.getReference().getDisplayString(), expectedAlleles.get(i).get(0)); + Assert.assertEquals(actual.getAlternateAllele(0).getDisplayString(), expectedAlleles.get(i).get(1)); + } + } +} From 67cd407854d5d0acaa06861745c64e9ba65e8d82 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 27 Mar 2013 09:41:04 -0400 Subject: [PATCH 04/23] The GenotypingEngine now uses the samples from the mapping of Samples -> PerReadAllele likelihoods instead of passing around a redundant list of samples --- .../haplotypecaller/GenotypingEngine.java | 28 ++++++++----------- .../haplotypecaller/HaplotypeCaller.java | 1 - 2 files changed, 11 insertions(+), 18 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 8e76b6ea6..59cadbdf9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -119,9 +119,10 @@ public class GenotypingEngine { * Main entry point of class - given a particular set of haplotypes, samples and reference context, compute * genotype likelihoods and assemble into a list of variant contexts and genomic events ready for calling * + * The list of samples we're working with is obtained from the haplotypeReadMap + * * @param UG_engine UG Engine with basic input parameters * @param haplotypes Haplotypes to assign likelihoods to - * @param samples Samples to genotype * @param haplotypeReadMap Map from reads->(haplotypes,likelihoods) * @param perSampleFilteredReadList * @param ref Reference bytes at 
active region @@ -136,7 +137,6 @@ public class GenotypingEngine { // TODO - can this be refactored? this is hard to follow! public CalledHaplotypes assignGenotypeLikelihoods( final UnifiedGenotyperEngine UG_engine, final List haplotypes, - final List samples, final Map haplotypeReadMap, final Map> perSampleFilteredReadList, final byte[] ref, @@ -147,7 +147,6 @@ public class GenotypingEngine { // sanity check input arguments if (UG_engine == null) throw new IllegalArgumentException("UG_Engine input can't be null, got "+UG_engine); if (haplotypes == null || haplotypes.isEmpty()) throw new IllegalArgumentException("haplotypes input should be non-empty and non-null, got "+haplotypes); - if (samples == null || samples.isEmpty()) throw new IllegalArgumentException("samples input must be non-empty and non-null, got "+samples); if (haplotypeReadMap == null || haplotypeReadMap.isEmpty()) throw new IllegalArgumentException("haplotypeReadMap input should be non-empty and non-null, got "+haplotypeReadMap); if (ref == null || ref.length == 0 ) throw new IllegalArgumentException("ref bytes input should be non-empty and non-null, got "+ref); if (refLoc == null || refLoc.getStop()-refLoc.getStart()+1 != ref.length) throw new IllegalArgumentException(" refLoc must be non-null and length must match ref bytes, got "+refLoc); @@ -157,7 +156,7 @@ public class GenotypingEngine { // update the haplotypes so we're ready to call, getting the ordered list of positions on the reference // that carry events among the haplotypes - final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, samples, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); + final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); // Walk along each position in the key set and create each event to be outputted final Set calledHaplotypes = new HashSet(); @@ -195,7 +194,7 @@ public class GenotypingEngine { final 
Map alleleReadMap = convertHaplotypeReadMapToAlleleReadMap( haplotypeReadMap, alleleMapper, UG_engine.getUAC().CONTAMINATION_FRACTION, UG_engine.getUAC().contaminationLog ); - final GenotypesContext genotypes = calculateGLsForThisEvent( samples, alleleReadMap, mergedVC ); + final GenotypesContext genotypes = calculateGLsForThisEvent( alleleReadMap, mergedVC ); final VariantContext call = UG_engine.calculateGenotypes(new VariantContextBuilder(mergedVC).genotypes(genotypes).make(), mergedVC.isSNP() ? GenotypeLikelihoodsCalculationModel.Model.SNP : GenotypeLikelihoodsCalculationModel.Model.INDEL); if( call != null ) { final Map alleleReadMap_annotations = ( USE_FILTERED_READ_MAP_FOR_ANNOTATIONS ? alleleReadMap : @@ -224,7 +223,6 @@ public class GenotypingEngine { * Go through the haplotypes we assembled, and decompose them into their constituent variant contexts * * @param haplotypes the list of haplotypes we're working with - * @param samples the samples we're working with * @param haplotypeReadMap map from samples -> the per read allele likelihoods * @param ref the reference bases (over the same interval as the haplotypes) * @param refLoc the span of the reference bases @@ -232,7 +230,6 @@ public class GenotypingEngine { * @return */ private TreeSet decomposeHaplotypesIntoVariantContexts(final List haplotypes, - final List samples, final Map haplotypeReadMap, final byte[] ref, final GenomeLoc refLoc, @@ -259,9 +256,9 @@ public class GenotypingEngine { } cleanUpSymbolicUnassembledEvents( haplotypes ); - if ( !in_GGA_mode && samples.size() >= 10 ) { + if ( !in_GGA_mode && haplotypeReadMap.size() >= 10 ) { // if not in GGA mode and have at least 10 samples try to create MNP and complex events by looking at LD structure - mergeConsecutiveEventsBasedOnLD( haplotypes, samples, haplotypeReadMap, startPosKeySet, ref, refLoc ); + mergeConsecutiveEventsBasedOnLD( haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc ); cleanUpSymbolicUnassembledEvents( haplotypes ); // 
the newly created merged events could be overlapping the unassembled events } @@ -282,7 +279,6 @@ public class GenotypingEngine { private List makePriorityList(final List vcs) { final List priorityList = new LinkedList(); for ( final VariantContext vc : vcs ) priorityList.add(vc.getSource()); - return priorityList; } @@ -333,17 +329,16 @@ public class GenotypingEngine { /** * For a particular event described in inputVC, form PL vector for each sample by looking into allele read map and filling likelihood matrix for each allele - * @param samples List of samples to genotype * @param alleleReadMap Allele map describing mapping from reads to alleles and corresponding likelihoods * @param mergedVC Input VC with event to genotype * @return GenotypesContext object wrapping genotype objects with PLs */ - @Requires({"samples != null","alleleReadMap!= null", "mergedVC != null"}) + @Requires({"alleleReadMap!= null", "mergedVC != null"}) @Ensures("result != null") - private GenotypesContext calculateGLsForThisEvent( final List samples, final Map alleleReadMap, final VariantContext mergedVC ) { - final GenotypesContext genotypes = GenotypesContext.create(samples.size()); + private GenotypesContext calculateGLsForThisEvent( final Map alleleReadMap, final VariantContext mergedVC ) { + final GenotypesContext genotypes = GenotypesContext.create(alleleReadMap.size()); // Grab the genotype likelihoods from the appropriate places in the haplotype likelihood matrix -- calculation performed independently per sample - for( final String sample : samples ) { + for( final String sample : alleleReadMap.keySet() ) { final int numHaplotypes = mergedVC.getAlleles().size(); final double[] genotypeLikelihoods = new double[numHaplotypes * (numHaplotypes+1) / 2]; final double[][] haplotypeLikelihoodMatrix = LikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, alleleReadMap, mergedVC.getAlleles()); @@ -448,14 +443,12 @@ public class GenotypingEngine { /** * TODO - comment me, 
clean me, refactor me! * @param haplotypes - * @param samples * @param haplotypeReadMap * @param startPosKeySet * @param ref * @param refLoc */ protected void mergeConsecutiveEventsBasedOnLD( final List haplotypes, - final List samples, final Map haplotypeReadMap, final TreeSet startPosKeySet, final byte[] ref, @@ -465,6 +458,7 @@ public class GenotypingEngine { final double MERGE_EVENTS_R2_THRESHOLD = 0.95; if( startPosKeySet.size() <= 1 ) { return; } + final Set samples = haplotypeReadMap.keySet(); boolean mapWasUpdated = true; while( mapWasUpdated ) { mapWasUpdated = false; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index d77caa2a2..a6b19826b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -559,7 +559,6 @@ public class HaplotypeCaller extends ActiveRegionWalker implem final GenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine.assignGenotypeLikelihoods( UG_engine, bestHaplotypes, - samplesList, stratifiedReadMap, perSampleFilteredReadList, fullReferenceWithPadding, From 8656bd5e29980029e997e0339e732e6379034f75 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 28 Mar 2013 09:40:08 -0400 Subject: [PATCH 05/23] Haplotype now consolidates cigars in setCigar -- This fixes edge base bugs where non-consolidated cigars are causing problems in users of the Haplotype object. 
Input arguments are now checks (let's see if we blow up) --- .../sting/utils/haplotype/Haplotype.java | 43 ++++++++++++++++--- 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index 2e95fb03a..a94c08198 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -36,10 +36,12 @@ import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.variantcontext.Allele; -import org.broadinstitute.variant.variantcontext.VariantContext; import java.io.Serializable; -import java.util.*; +import java.util.Arrays; +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.List; public class Haplotype extends Allele { private GenomeLoc genomeLocation = null; @@ -52,21 +54,36 @@ public class Haplotype extends Allele { /** * Main constructor * - * @param bases bases - * @param isRef is reference allele? + * @param bases a non-null array of bases + * @param isRef is this the reference haplotype? */ public Haplotype( final byte[] bases, final boolean isRef ) { super(bases.clone(), isRef); } + /** + * Create a new non-ref haplotype + * + * @param bases a non-null array of bases + */ public Haplotype( final byte[] bases ) { this(bases, false); } + /** + * Create a new haplotype with bases + * + * Requires bases.length == cigar.getReadLength() + * + * @param bases a non-null array of bases + * @param isRef is this the reference haplotype? + * @param alignmentStartHapwrtRef offset of this haplotype w.r.t. 
the reference + * @param cigar the cigar that maps this haplotype to the reference sequence + */ public Haplotype( final byte[] bases, final boolean isRef, final int alignmentStartHapwrtRef, final Cigar cigar) { this(bases, isRef); this.alignmentStartHapwrtRef = alignmentStartHapwrtRef; - this.cigar = cigar; + setCigar(cigar); } /** @@ -127,6 +144,11 @@ public class Haplotype extends Allele { this.alignmentStartHapwrtRef = alignmentStartHapwrtRef; } + /** + * Get the cigar for this haplotype. Note that cigar is guarenteed to be consolidated + * in that multiple adjacent equal operates will have been merged + * @return the cigar of this haplotype + */ public Cigar getCigar() { return cigar; } @@ -144,8 +166,17 @@ public class Haplotype extends Allele { return AlignmentUtils.consolidateCigar(extendedHaplotypeCigar); } + /** + * Set the cigar of this haplotype to cigar. + * + * Note that this function consolidates the cigar, so that 1M1M1I1M1M => 2M1I2M + * + * @param cigar a cigar whose readLength == length() + */ public void setCigar( final Cigar cigar ) { - this.cigar = cigar; + this.cigar = AlignmentUtils.consolidateCigar(cigar); + if ( this.cigar.getReadLength() != length() ) + throw new IllegalArgumentException("Read length " + length() + " not equal to the read length of the cigar " + cigar.getReadLength()); } public boolean isArtificialHaplotype() { From 167cd49e710f8a37e37ba28528b93722f50e6253 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 28 Mar 2013 17:35:00 -0400 Subject: [PATCH 06/23] Added -forceActive argument to ActiveRegionWalkers -- Causes the ART tool to treat all bases as active. 
Useful for debugging --- .../sting/gatk/traversals/TraverseActiveRegions.java | 1 + .../sting/gatk/walkers/ActiveRegionWalker.java | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java index 7b831db32..908755a24 100644 --- a/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java +++ b/public/java/src/org/broadinstitute/sting/gatk/traversals/TraverseActiveRegions.java @@ -487,6 +487,7 @@ public class TraverseActiveRegions extends TraversalEngine extends Walker Date: Thu, 28 Mar 2013 18:17:27 -0400 Subject: [PATCH 07/23] LD-based merging algorithm for nearby events in the haplotypes -- Moved R^2 LD haplotype merging system to the utils.haplotype package -- New LD merging only enabled with HC argument. -- EventExtractor and EventExtractorUnitTest refactors so we can test the block substitution code without having to enabled it via a static variable -- A few misc. bug fixes in LDMerger itself -- Refactoring of Haplotype event splitting and merging code -- Renamed EventExtractor to EventMap -- EventMap has a static method that computes the event maps among n haplotypes -- Refactor Haplotype score and base comparators into their own classes and unit tested them -- Refactored R^2 based LD merging code into its own class HaplotypeR2Calculator and unit tested much of it. -- LDMerger now uses the HaplotypeR2Calculator, which cleans up the code a bunch and allowed me to easily test that code with a MockHaplotypeR2Calculator. For those who haven't seen this testing idiom, have a look, and very useful -- New algorithm uses a likelihood-ratio test to compute the probability that only the phased haplotypes exist in the population. 
-- Fixed fundamental bug in the way the previous R^2 implementation worked -- Optimizations for HaplotypeLDCalculator: only compute the per sample per haplotype summed likelihoods once, regardless of how many calls there are -- Previous version would enter infinite loop if it merged two events but the second event had other low likelihood events in other haplotypes that didn't get removed. Now when events are removed they are removed from all event maps, regardless of whether the haplotypes carry both events -- Bugfixes for EventMap in the HaplotypeCaller as well. Previous version was overly restrictive, requiring that the first event to make into a block substitution was a snp. In some cases we need to merge an insertion with a deletion, such as when the cigar is 10M2I3D4M. The new code supports this. UnitTested and documented as well. LDMerger handles case where merging two alleles results in a no-op event. Merging CA/C + A/AA -> CAA/CAA -> no op. Handles this case by removing the two events. UnitTested -- Turn off debugging output for the LDMerger in the HaplotypeCaller unless -debug was enabled -- This new version does a much more specific test (that's actually right). Here's the new algorithm: * Compute probability that two variants are in phase with each other and that no * compound hets exist in the population. * * Implemented as a likelihood ratio test of the hypothesis: * * x11 and x22 are the only haplotypes in the populations * * vs. * * all four haplotype combinations (x11, x12, x21, and x22) all exist in the population. * * Now, since we have to have both variants in the population, we exclude the x11 & x11 state. So the * p of having just x11 and x22 is P(x11 & x22) + p(x22 & x22). 
* * Alternatively, we might have any configuration that gives us both 1 and 2 alts, which are: * * - P(x11 & x12 & x21) -- we have hom-ref and both hets * - P(x22 & x12 & x21) -- we have hom-alt and both hets * - P(x22 & x12) -- one haplotype is 22 and the other is het 12 * - P(x22 & x21) -- one haplotype is 22 and the other is het 21 --- .../haplotypecaller/GenotypingEngine.java | 203 ++--------- .../haplotypecaller/HaplotypeCaller.java | 13 +- .../LikelihoodCalculationEngine.java | 14 +- .../haplotype/HaplotypeLDCalculator.java | 194 ++++++++++ .../sting/utils/haplotype/LDMerger.java | 303 ++++++++++++++++ .../GenotypingEngineUnitTest.java | 142 -------- .../HaplotypeBaseComparatorUnitTest.java | 77 ++++ .../HaplotypeLDCalculatorUnitTest.java | 118 +++++++ .../HaplotypeScoreComparatorUnitTest.java | 76 ++++ .../utils/haplotype/LDMergerUnitTest.java | 334 ++++++++++++++++++ .../{EventExtractor.java => EventMap.java} | 221 ++++++++---- .../sting/utils/haplotype/Haplotype.java | 38 +- .../haplotype/HaplotypeBaseComparator.java | 42 +++ .../haplotype/HaplotypeScoreComparator.java | 39 ++ ...torUnitTest.java => EventMapUnitTest.java} | 112 +++--- 15 files changed, 1451 insertions(+), 475 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculator.java create mode 100644 protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java create mode 100644 protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparatorUnitTest.java create mode 100644 protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculatorUnitTest.java create mode 100644 protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparatorUnitTest.java create mode 100644 protected/java/test/org/broadinstitute/sting/utils/haplotype/LDMergerUnitTest.java rename public/java/src/org/broadinstitute/sting/utils/haplotype/{EventExtractor.java => EventMap.java} (58%) create mode 100644 
public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparator.java create mode 100644 public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparator.java rename public/java/test/org/broadinstitute/sting/utils/haplotype/{EventExtractorUnitTest.java => EventMapUnitTest.java} (61%) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 59cadbdf9..7cdc57464 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -48,18 +48,18 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; -import net.sf.samtools.Cigar; -import net.sf.samtools.CigarElement; -import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine; import org.broadinstitute.sting.gatk.walkers.genotyper.GenotypeLikelihoodsCalculationModel; import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine; -import org.broadinstitute.sting.utils.*; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.GenomeLocParser; +import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.haplotype.EventExtractor; +import org.broadinstitute.sting.utils.haplotype.EventMap; import org.broadinstitute.sting.utils.haplotype.Haplotype; +import org.broadinstitute.sting.utils.haplotype.LDMerger; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import 
org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.*; @@ -74,12 +74,16 @@ public class GenotypingEngine { private final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; private final static List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied private final VariantAnnotatorEngine annotationEngine; + private final LDMerger ldMerger; - public GenotypingEngine( final boolean DEBUG, final VariantAnnotatorEngine annotationEngine, final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS ) { + public GenotypingEngine( final boolean DEBUG, final VariantAnnotatorEngine annotationEngine, + final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, + final LDMerger ldMerger) { this.DEBUG = DEBUG; this.annotationEngine = annotationEngine; this.USE_FILTERED_READ_MAP_FOR_ANNOTATIONS = USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; noCall.add(Allele.NO_CALL); + this.ldMerger = ldMerger; } /** @@ -235,31 +239,18 @@ public class GenotypingEngine { final GenomeLoc refLoc, final List activeAllelesToGenotype) { final boolean in_GGA_mode = !activeAllelesToGenotype.isEmpty(); - int hapNumber = 0; // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file - final TreeSet startPosKeySet = new TreeSet(); + final TreeSet startPosKeySet = EventMap.buildEventMapsForHaplotypes(haplotypes, ref, refLoc, DEBUG); - if( DEBUG ) logger.info("=== Best Haplotypes ==="); - for( final Haplotype h : haplotypes ) { - // Walk along the alignment and turn any difference from the reference into an event - h.setEventMap( new EventExtractor( h, ref, refLoc, "HC" + hapNumber++ ) ); - if( ! 
in_GGA_mode ) { - startPosKeySet.addAll(h.getEventMap().getStartPositions()); - } - - if( DEBUG ) { - logger.info(h.toString()); - logger.info("> Cigar = " + h.getCigar()); - logger.info(">> Events = " + h.getEventMap()); - } - } + if ( in_GGA_mode ) startPosKeySet.clear(); cleanUpSymbolicUnassembledEvents( haplotypes ); - if ( !in_GGA_mode && haplotypeReadMap.size() >= 10 ) { + if ( !in_GGA_mode ) { // if not in GGA mode and have at least 10 samples try to create MNP and complex events by looking at LD structure - mergeConsecutiveEventsBasedOnLD( haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc ); - cleanUpSymbolicUnassembledEvents( haplotypes ); // the newly created merged events could be overlapping the unassembled events + final boolean mergedAnything = ldMerger.mergeConsecutiveEventsBasedOnLD( haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc ); + if ( mergedAnything ) + cleanUpSymbolicUnassembledEvents( haplotypes ); // the newly created merged events could be overlapping the unassembled events } if ( in_GGA_mode ) { @@ -290,7 +281,7 @@ public class GenotypingEngine { if( activeAllelesToGenotype.isEmpty() ) { for( final Haplotype h : haplotypes ) { - final EventExtractor eventMap = h.getEventMap(); + final EventMap eventMap = h.getEventMap(); final VariantContext vc = eventMap.get(loc); if( vc != null && !containsVCWithMatchingAlleles(eventsAtThisLoc, vc) ) { eventsAtThisLoc.add(vc); @@ -341,14 +332,14 @@ public class GenotypingEngine { for( final String sample : alleleReadMap.keySet() ) { final int numHaplotypes = mergedVC.getAlleles().size(); final double[] genotypeLikelihoods = new double[numHaplotypes * (numHaplotypes+1) / 2]; - final double[][] haplotypeLikelihoodMatrix = LikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, alleleReadMap, mergedVC.getAlleles()); + final double[][] haplotypeLikelihoodMatrix = LikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, alleleReadMap, mergedVC.getAlleles(), 
true); int glIndex = 0; for( int iii = 0; iii < numHaplotypes; iii++ ) { for( int jjj = 0; jjj <= iii; jjj++ ) { genotypeLikelihoods[glIndex++] = haplotypeLikelihoodMatrix[iii][jjj]; // for example: AA,AB,BB,AC,BC,CC } } - genotypes.add( new GenotypeBuilder(sample).alleles(noCall).PL(genotypeLikelihoods).make() ); + genotypes.add(new GenotypeBuilder(sample).alleles(noCall).PL(genotypeLikelihoods).make()); } return genotypes; } @@ -440,156 +431,6 @@ public class GenotypingEngine { return alleleReadMap; } - /** - * TODO - comment me, clean me, refactor me! - * @param haplotypes - * @param haplotypeReadMap - * @param startPosKeySet - * @param ref - * @param refLoc - */ - protected void mergeConsecutiveEventsBasedOnLD( final List haplotypes, - final Map haplotypeReadMap, - final TreeSet startPosKeySet, - final byte[] ref, - final GenomeLoc refLoc ) { - - final int MAX_SIZE_TO_COMBINE = 15; - final double MERGE_EVENTS_R2_THRESHOLD = 0.95; - if( startPosKeySet.size() <= 1 ) { return; } - - final Set samples = haplotypeReadMap.keySet(); - boolean mapWasUpdated = true; - while( mapWasUpdated ) { - mapWasUpdated = false; - - // loop over the set of start locations and consider pairs that start near each other - final Iterator iter = startPosKeySet.iterator(); - int thisStart = iter.next(); - while( iter.hasNext() ) { - final int nextStart = iter.next(); - if( nextStart - thisStart < MAX_SIZE_TO_COMBINE) { - boolean isBiallelic = true; - VariantContext thisVC = null; - VariantContext nextVC = null; - double x11 = Double.NEGATIVE_INFINITY; - double x12 = Double.NEGATIVE_INFINITY; - double x21 = Double.NEGATIVE_INFINITY; - double x22 = Double.NEGATIVE_INFINITY; - - for( final Haplotype h : haplotypes ) { - // only make complex substitutions out of consecutive biallelic sites - final VariantContext thisHapVC = h.getEventMap().get(thisStart); - if( thisHapVC != null && !thisHapVC.isSymbolic() ) { // something was found at this location on this haplotype - if( thisVC == null ) { 
- thisVC = thisHapVC; - } else if( !thisHapVC.hasSameAllelesAs( thisVC ) ) { - isBiallelic = false; - break; - } - } - final VariantContext nextHapVC = h.getEventMap().get(nextStart); - if( nextHapVC != null && !nextHapVC.isSymbolic() ) { // something was found at the next location on this haplotype - if( nextVC == null ) { - nextVC = nextHapVC; - } else if( !nextHapVC.hasSameAllelesAs( nextVC ) ) { - isBiallelic = false; - break; - } - } - // count up the co-occurrences of the events for the R^2 calculation - for( final String sample : samples ) { - final double haplotypeLikelihood = LikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods( Collections.singleton(sample), haplotypeReadMap, Collections.singletonList(Allele.create(h, true)) )[0][0]; - if( thisHapVC == null ) { - if( nextHapVC == null ) { x11 = MathUtils.approximateLog10SumLog10(x11, haplotypeLikelihood); } - else { x12 = MathUtils.approximateLog10SumLog10(x12, haplotypeLikelihood); } - } else { - if( nextHapVC == null ) { x21 = MathUtils.approximateLog10SumLog10(x21, haplotypeLikelihood); } - else { x22 = MathUtils.approximateLog10SumLog10(x22, haplotypeLikelihood); } - } - } - } - if( thisVC == null || nextVC == null ) { - continue; - } - if( isBiallelic ) { - final double R2 = calculateR2LD( Math.pow(10.0, x11), Math.pow(10.0, x12), Math.pow(10.0, x21), Math.pow(10.0, x22) ); - if( DEBUG ) { - logger.info("Found consecutive biallelic events with R^2 = " + String.format("%.4f", R2)); - logger.info("-- " + thisVC); - logger.info("-- " + nextVC); - } - if( R2 > MERGE_EVENTS_R2_THRESHOLD ) { - - final VariantContext mergedVC = createMergedVariantContext(thisVC, nextVC, ref, refLoc); - - // remove the old event from the eventMap on every haplotype and the start pos key set, replace with merged event - for( final Haplotype h : haplotypes ) { - if( h.getEventMap().containsKey(thisStart) && h.getEventMap().containsKey(nextStart) ) { - h.getEventMap().remove(thisStart); - 
h.getEventMap().remove(nextStart); - h.getEventMap().put(mergedVC.getStart(), mergedVC); - } - } - startPosKeySet.add(mergedVC.getStart()); - boolean containsStart = false; - boolean containsNext = false; - for( final Haplotype h : haplotypes ) { - final Map eventMap = h.getEventMap(); - if( eventMap.containsKey(thisStart) ) { containsStart = true; } - if( eventMap.containsKey(nextStart) ) { containsNext = true; } - } - if(!containsStart) { startPosKeySet.remove(thisStart); } - if(!containsNext) { startPosKeySet.remove(nextStart); } - - if( DEBUG ) { logger.info("====> " + mergedVC); } - mapWasUpdated = true; - break; // break out of tree set iteration since it was just updated, start over from the beginning and keep merging events - } - } - } - thisStart = nextStart; - } - } - } - - // BUGBUG: make this merge function more general - protected static VariantContext createMergedVariantContext( final VariantContext thisVC, final VariantContext nextVC, final byte[] ref, final GenomeLoc refLoc ) { - final int thisStart = thisVC.getStart(); - final int nextStart = nextVC.getStart(); - byte[] refBases = new byte[]{}; - byte[] altBases = new byte[]{}; - refBases = ArrayUtils.addAll(refBases, thisVC.getReference().getBases()); - altBases = ArrayUtils.addAll(altBases, thisVC.getAlternateAllele(0).getBases()); - int locus; - for( locus = thisStart + refBases.length; locus < nextStart; locus++ ) { - final byte refByte = ref[locus - refLoc.getStart()]; - refBases = ArrayUtils.add(refBases, refByte); - altBases = ArrayUtils.add(altBases, refByte); - } - refBases = ArrayUtils.addAll(refBases, ArrayUtils.subarray(nextVC.getReference().getBases(), locus > nextStart ? 
1 : 0, nextVC.getReference().getBases().length)); // special case of deletion including the padding base of consecutive indel - altBases = ArrayUtils.addAll(altBases, nextVC.getAlternateAllele(0).getBases()); - - int iii = 0; - if( refBases.length == altBases.length ) { // insertion + deletion of same length creates an MNP --> trim common prefix bases off the beginning of the allele - while( iii < refBases.length && refBases[iii] == altBases[iii] ) { iii++; } - } - final List mergedAlleles = new ArrayList(); - mergedAlleles.add( Allele.create( ArrayUtils.subarray(refBases, iii, refBases.length), true ) ); - mergedAlleles.add( Allele.create( ArrayUtils.subarray(altBases, iii, altBases.length), false ) ); - return new VariantContextBuilder("merged", thisVC.getChr(), thisVC.getStart() + iii, nextVC.getEnd(), mergedAlleles).make(); - } - - protected static double calculateR2LD( final double x11, final double x12, final double x21, final double x22 ) { - final double total = x11 + x12 + x21 + x22; - final double pa1b1 = x11 / total; - final double pa1b2 = x12 / total; - final double pa2b1 = x21 / total; - final double pa1 = pa1b1 + pa1b2; - final double pb1 = pa1b1 + pa2b1; - return ((pa1b1 - pa1*pb1) * (pa1b1 - pa1*pb1)) / ( pa1 * (1.0 - pa1) * pb1 * (1.0 - pb1) ); - } - protected static Map> createAlleleMapper( final Map mergeMap, final Map> eventMap ) { final Map> alleleMapper = new LinkedHashMap>(); for( final Map.Entry entry : mergeMap.entrySet() ) { @@ -616,8 +457,8 @@ public class GenotypingEngine { alleles.add(h.getArtificialRefAllele()); alleles.add(h.getArtificialAltAllele()); final Event artificialVC = new Event( (new VariantContextBuilder()).source("artificialHaplotype") - .alleles(alleles) - .loc(refVC.getChr(), refVC.getStart(), refVC.getStart() + h.getArtificialRefAllele().length() - 1).make() ); + .alleles(alleles) + .loc(refVC.getChr(), refVC.getStart(), refVC.getStart() + h.getArtificialRefAllele().length() - 1).make() ); if( 
eventMapper.containsKey(artificialVC) ) { eventMapper.get(artificialVC).add(h); } @@ -711,7 +552,7 @@ public class GenotypingEngine { @Deprecated protected static Map generateVCsFromAlignment( final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd ) { - return new EventExtractor(haplotype, ref, refLoc, sourceNameToAdd); + return new EventMap(haplotype, ref, refLoc, sourceNameToAdd); } protected static boolean containsVCWithMatchingAlleles( final List list, final VariantContext vcToTest ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index a6b19826b..53fffec61 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -78,6 +78,8 @@ import org.broadinstitute.sting.utils.fragments.FragmentCollection; import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.haplotype.Haplotype; +import org.broadinstitute.sting.utils.haplotype.HaplotypeBaseComparator; +import org.broadinstitute.sting.utils.haplotype.LDMerger; import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; import org.broadinstitute.sting.utils.help.HelpConstants; @@ -302,6 +304,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="useLowQualityBasesForAssembly", shortName="useLowQualityBasesForAssembly", doc="If specified, we will include low quality bases when doing the assembly", required = false) protected boolean useLowQualityBasesForAssembly = false; + @Hidden + @Argument(fullName="useNewLDMerger", 
shortName="useNewLDMerger", doc="If specified, we will include low quality bases when doing the assembly", required = false) + protected boolean useNewLDMerger = false; + // the UG engines private UnifiedGenotyperEngine UG_engine = null; private UnifiedGenotyperEngine UG_engine_simple_genotyper = null; @@ -412,7 +418,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem if ( useLowQualityBasesForAssembly ) assemblyEngine.setMinBaseQualityToUseInAssembly((byte)1); likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM ); - genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS ); + + final LDMerger ldMerger = new LDMerger(DEBUG, useNewLDMerger ? 10 : 10, useNewLDMerger ? 1 : 10); + + genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, ldMerger ); if ( bamWriter != null ) haplotypeBAMWriter = HaplotypeBAMWriter.create(bamWriterType, bamWriter, getToolkit().getSAMFileHeader()); @@ -545,7 +554,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem if( activeRegion.size() == 0 ) { return 1; } // no reads remain after filtering so nothing else to do! 
// sort haplotypes to take full advantage of haplotype start offset optimizations in PairHMM - Collections.sort( haplotypes, new Haplotype.HaplotypeBaseComparator() ); + Collections.sort( haplotypes, new HaplotypeBaseComparator() ); if (dontGenotype) return 1; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index df1c9aabc..543b23d9c 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -158,17 +158,17 @@ public class LikelihoodCalculationEngine { @Ensures({"result.length == result[0].length", "result.length == alleleOrdering.size()"}) public static double[][] computeDiploidHaplotypeLikelihoods( final String sample, final Map stratifiedReadMap, - final List alleleOrdering ) { - final TreeSet sampleSet = new TreeSet(); - sampleSet.add(sample); - return computeDiploidHaplotypeLikelihoods(sampleSet, stratifiedReadMap, alleleOrdering); + final List alleleOrdering, + final boolean normalize ) { + return computeDiploidHaplotypeLikelihoods(Collections.singleton(sample), stratifiedReadMap, alleleOrdering, normalize); } @Requires({"alleleOrdering.size() > 0"}) @Ensures({"result.length == result[0].length", "result.length == alleleOrdering.size()"}) public static double[][] computeDiploidHaplotypeLikelihoods( final Set samples, final Map stratifiedReadMap, - final List alleleOrdering ) { + final List alleleOrdering, + final boolean normalize) { final int numHaplotypes = alleleOrdering.size(); final double[][] haplotypeLikelihoodMatrix = new double[numHaplotypes][numHaplotypes]; @@ -195,7 +195,7 @@ public class LikelihoodCalculationEngine { } // normalize the diploid likelihoods matrix - return 
normalizeDiploidLikelihoodMatrixFromLog10( haplotypeLikelihoodMatrix ); + return normalize ? normalizeDiploidLikelihoodMatrixFromLog10( haplotypeLikelihoodMatrix ) : haplotypeLikelihoodMatrix; } @Requires({"likelihoodMatrix.length == likelihoodMatrix[0].length"}) @@ -230,7 +230,7 @@ public class LikelihoodCalculationEngine { final List haplotypesAsAlleles = new ArrayList(); for( final Haplotype h : haplotypes ) { haplotypesAsAlleles.add(Allele.create(h, true)); } - final double[][] haplotypeLikelihoodMatrix = computeDiploidHaplotypeLikelihoods( sampleKeySet, stratifiedReadMap, haplotypesAsAlleles ); // all samples pooled together + final double[][] haplotypeLikelihoodMatrix = computeDiploidHaplotypeLikelihoods( sampleKeySet, stratifiedReadMap, haplotypesAsAlleles, true ); // all samples pooled together int hap1 = 0; int hap2 = 0; diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculator.java b/protected/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculator.java new file mode 100644 index 000000000..4609c3209 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculator.java @@ -0,0 +1,194 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import com.google.java.contract.Requires; +import org.broadinstitute.sting.gatk.walkers.haplotypecaller.LikelihoodCalculationEngine; +import org.broadinstitute.sting.utils.MathUtils; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; + +import java.util.*; + +/** + * Computes the likelihood based probability that haplotypes for first and second variant contexts + * only appear in their fully linked form (x11 and x22) given a set of haplotypes where they might occur + * and read likelihoods per sample + * + * User: depristo + * Date: 3/29/13 + * Time: 9:23 AM + */ +public class HaplotypeLDCalculator { + private final List haplotypes; + private final Map haplotypeReadMap; + private List> haplotypeLikelihoodsPerSample = null; + + // linear contigency table with table[0] == [0][0], table[1] = [0][1], table[2] = [1][0], table[3] = [1][1] + private final double[] table = new double[4]; + + /** + * For testing + */ + protected HaplotypeLDCalculator() { + haplotypes = Collections.emptyList(); + haplotypeReadMap = Collections.emptyMap(); + } + + public HaplotypeLDCalculator(List 
haplotypes, Map haplotypeReadMap) { + this.haplotypes = haplotypes; + this.haplotypeReadMap = haplotypeReadMap; + } + + /** + * Construct the cached list of summed haplotype likelihoods per sample if it + * hasn't already been computed. This data structure is lazy created but only + * needs to be made once when we make 1 merge decision as the data doesn't change + * no matter how many calls to computeProbOfBeingPhased + */ + private void buildHaplotypeLikelihoodsPerSampleIfNecessary() { + if ( haplotypeLikelihoodsPerSample == null ) { + // do the lazy computation + final Set samples = haplotypeReadMap.keySet(); + haplotypeLikelihoodsPerSample = new LinkedList>(); + for( final String sample : samples ) { + final Map map = new HashMap(haplotypes.size()); + for( final Haplotype h : haplotypes ) { + // count up the co-occurrences of the events for the R^2 calculation + final double haplotypeLikelihood = LikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, haplotypeReadMap, Collections.singletonList(Allele.create(h, true)), false)[0][0]; + map.put(h, haplotypeLikelihood); + } + haplotypeLikelihoodsPerSample.add(map); + } + } + } + + /** + * Compute the likelihood based probability that that haplotypes for first and second are only x11 and x22 + * + * As opposed to the hypothesis that all four haplotypes (x11, x12, x21, and x22) exist in the population + * + * @param first a non-null VariantContext + * @param second a non-null VariantContext + * @return the probability that only x11 and x22 exist among the samples + */ + protected double computeProbOfBeingPhased(final VariantContext first, final VariantContext second) { + buildHaplotypeLikelihoodsPerSampleIfNecessary(); + + Arrays.fill(table, Double.NEGATIVE_INFINITY); + + for ( final Map entry : haplotypeLikelihoodsPerSample ) { + for ( final Map.Entry haplotypeLikelihood : entry.entrySet() ) { + final Haplotype h = haplotypeLikelihood.getKey(); + // count up the co-occurrences of the events for the 
R^2 calculation + final VariantContext thisHapVC = h.getEventMap().get(first.getStart()); + final VariantContext nextHapVC = h.getEventMap().get(second.getStart()); // TODO -- add function to take a VC + final int i = thisHapVC == null ? 0 : 1; + final int j = nextHapVC == null ? 0 : 1; + final int index = 2 * i + j; + table[index] = MathUtils.approximateLog10SumLog10(table[index], haplotypeLikelihood.getValue()); + } + } + + return pPhased(table); + } + + /** + * Compute probability that two variants are in phase with each other and that no + * compound hets exist in the population. + * + * Implemented as a likelihood ratio test of the hypothesis: + * + * x11 and x22 are the only haplotypes in the populations + * + * vs. + * + * all four haplotype combinations (x11, x12, x21, and x22) all exist in the population. + * + * Now, since we have to have both variants in the population, we exclude the x11 & x11 state. So the + * p of having just x11 and x22 is P(x11 & x22) + p(x22 & x22). + * + * Alternatively, we might have any configuration that gives us both 1 and 2 alts, which are: + * + * - P(x11 & x12 & x21) -- we have hom-ref and both hets + * - P(x22 & x12 & x21) -- we have hom-alt and both hets + * - P(x22 & x12) -- one haplotype is 22 and the other is het 12 + * - P(x22 & x21) -- one haplotype is 22 and the other is het 21 + * + * The probability is just p11_22 / (p11_22 + p hets) + * + * @param table linear contingency table with table[0] == [0][0], table[1] = [0][1], table[2] = [1][0], table[3] = [1][1] + * doesn't have to be normalized as this function does the normalization internally + * @return the real space probability that the data is phased + */ + @Requires("table.length == 4") + protected double pPhased( double[] table ) { + final double[] normTable = MathUtils.normalizeFromLog10(table, true); + + final double x11 = normTable[0], x12 = normTable[1], x21 = normTable[2], x22 = normTable[3]; + + // probability that we are only x11 && x22 + final double p11_22
= MathUtils.approximateLog10SumLog10(x11 + x22, x22 + x22); + + // probability of having any of the other pairs + final double p11_12_21 = MathUtils.approximateLog10SumLog10(x11 + x12, x11 + x21, x12 + x21); + final double p22_12_21 = MathUtils.approximateLog10SumLog10(x22 + x12, x22 + x21, x12 + x21); + final double p22_12 = x22 + x12; + final double p22_21 = x22 + x21; + final double pOthers = MathUtils.approximateLog10SumLog10(new double[]{p11_12_21, p22_12_21, p22_12, p22_21}); + + // probability of being phased is the ratio of p11_22 / pOthers which in log space is just a subtraction + final double log10phased = p11_22 - (MathUtils.approximateLog10SumLog10(p11_22, pOthers)); + + return Math.pow(10.0, log10phased); + } + + protected double pPhasedTest( final double x11, final double x12, final double x21, final double x22 ) { + return pPhased(new double[]{x11, x12, x21, x22}); + } +} diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java b/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java new file mode 100644 index 000000000..ea00a1901 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java @@ -0,0 +1,303 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import org.apache.commons.lang.ArrayUtils; +import org.apache.log4j.Logger; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.variant.variantcontext.Allele; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; + +import java.util.*; + +/** + * Merges VariantContexts in a series of haplotypes according to their pairwise LD + * + * User: depristo + * Date: 3/28/13 + * Time: 6:17 PM + */ +public class LDMerger { + private final static Logger logger = Logger.getLogger(LDMerger.class); + + private final boolean DEBUG; + private final int minSamplesToMergeSNPs; + private final int minSamplesToMergeOtherEvents; + + public LDMerger(boolean DEBUG, int minSamplesToMergeSNPs, int minSamplesToMergeOtherEvents) { + this.DEBUG = DEBUG; + this.minSamplesToMergeSNPs = minSamplesToMergeSNPs; + this.minSamplesToMergeOtherEvents = minSamplesToMergeOtherEvents; + } + + protected LDMerger() { + this(false, 1, 1); + } + + // TODO -- should be class arguments and static variables in HC + protected final static int MAX_DISTANCE_BETWEEN_SNPS_TO_MERGE = 6; + 
protected final static int MAX_DISTANCE_BETWEEN_OTHER_EVENTS_TO_MERGE = 25; + + /** + * We require 99% confidence that only the phased haplotypes exist in the population to merge the records + */ + protected final static double MERGE_EVENTS_PROB_PHASED_THRESHOLD = 0.99; + + /** + * Merge as many events among the haplotypes as possible based on pairwise LD among variants + * + * @param haplotypes a list of haplotypes whose events we want to merge + * @param haplotypeReadMap map from sample name -> read likelihoods for each haplotype + * @param startPosKeySet a set of starting positions of all events among the haplotypes + * @param ref the reference bases + * @param refLoc the span of the reference bases + */ + public boolean mergeConsecutiveEventsBasedOnLD( final List haplotypes, + final Map haplotypeReadMap, + final TreeSet startPosKeySet, + final byte[] ref, + final GenomeLoc refLoc ) { + if ( haplotypes == null ) throw new IllegalArgumentException("haplotypes cannot be null"); + if ( haplotypeReadMap == null ) throw new IllegalArgumentException("haplotypeReadMap cannot be null"); + if ( startPosKeySet == null ) throw new IllegalArgumentException("startPosKeySet cannot be null"); + if ( ref == null ) throw new IllegalArgumentException("ref cannot be null"); + if ( refLoc == null ) throw new IllegalArgumentException("refLoc cannot be null"); + if ( refLoc.size() != ref.length ) throw new IllegalArgumentException("refLoc size " + refLoc.size() + " != ref.length " + ref.length + " at " + refLoc); + + if( startPosKeySet.size() <= 1 ) { return false; } + + final int nSamples = haplotypeReadMap.keySet().size(); + final HaplotypeLDCalculator r2Calculator = new HaplotypeLDCalculator(haplotypes, haplotypeReadMap); + boolean somethingWasMerged = false; + boolean mapWasUpdated = true; + while( mapWasUpdated ) { + mapWasUpdated = mergeConsecutiveEventsBasedOnLDOnce(haplotypes, r2Calculator, nSamples, startPosKeySet, ref, refLoc); + somethingWasMerged |= mapWasUpdated; + } + 
return somethingWasMerged; + } + + /** + * Merge the next pair of events, if possible + * + * @param haplotypes a list of haplotypes whose events we want to merge + * @param ldCalculator calculates R^2 for pairs of events on demand + * @param startPosKeySet a set of starting positions of all events among the haplotypes + * @param ref the reference bases + * @param refLoc the span of the reference bases + * @return true if something was merged, false otherwise + */ + protected boolean mergeConsecutiveEventsBasedOnLDOnce( final List haplotypes, + final HaplotypeLDCalculator ldCalculator, + final int nSamples, + final TreeSet startPosKeySet, + final byte[] ref, + final GenomeLoc refLoc ) { + // loop over the set of start locations and consider pairs that start near each other + final Iterator iter = startPosKeySet.iterator(); + int thisStart = iter.next(); + while( iter.hasNext() ) { + final int nextStart = iter.next(); + final LDMergeData toMerge = getPairOfEventsToMerge(haplotypes, thisStart, nextStart); + + if ( toMerge.canBeMerged(nSamples) ) { + final double pPhased = ldCalculator.computeProbOfBeingPhased(toMerge.firstVC, toMerge.secondVC); + + if( DEBUG ) { + logger.info("Found consecutive biallelic events with R^2 = " + String.format("%.4f", pPhased)); + logger.info("-- " + toMerge.firstVC); + logger.info("-- " + toMerge.secondVC); + } + + if( pPhased > MERGE_EVENTS_PROB_PHASED_THRESHOLD) { + final VariantContext mergedVC = createMergedVariantContext(toMerge.firstVC, toMerge.secondVC, ref, refLoc); + // if for some reason the merging resulting in a bad allele, mergedVC will be null, and we will just remove first and second + replaceVariantContextsInMap(haplotypes, startPosKeySet, mergedVC, toMerge.firstVC, toMerge.secondVC); + return true; // break out of tree set iteration since it was just updated, start over from the beginning and keep merging events + } + } + + thisStart = nextStart; + } + + return false; + } + + /** + * Info about potential LD merge of two 
variant contexts + */ + private class LDMergeData { + VariantContext firstVC = null, secondVC = null; + boolean canBeMerged = true; + + /** Tell this object that it cant be merged for some reason */ + public LDMergeData cantBeMerged() { + canBeMerged = false; + return this; + } + + /** + * Can these two events be merged + * @param nSamples the number of samples we're considering + * @return true if we can merge our two variant contexts + */ + public boolean canBeMerged(final int nSamples) { + if ( ! canBeMerged || firstVC == null || secondVC == null ) + return false; + + final int distance = secondVC.getStart() - firstVC.getEnd(); + if ( firstVC.isSNP() && secondVC.isSNP() ) { + return nSamples >= minSamplesToMergeSNPs && distance <= MAX_DISTANCE_BETWEEN_SNPS_TO_MERGE; + } else { + return nSamples >= minSamplesToMergeOtherEvents && distance <= MAX_DISTANCE_BETWEEN_OTHER_EVENTS_TO_MERGE; + } + } + } + + /** + * Get the information about the potential merge of two events starting at thisStart and nextStart + * @param haplotypes our haplotypes + * @param thisStart the starting position of the first event to merge + * @param nextStart the starting position of the next event to merge + * @return + */ + private LDMergeData getPairOfEventsToMerge(final List haplotypes, final int thisStart, final int nextStart) { + final LDMergeData mergeData = new LDMergeData(); + + for( final Haplotype h : haplotypes ) { + // only make complex substitutions out of consecutive biallelic sites + final VariantContext thisHapVC = h.getEventMap().get(thisStart); + if( thisHapVC != null && !thisHapVC.isSymbolic() ) { // something was found at this location on this haplotype + if( mergeData.firstVC == null ) { + mergeData.firstVC = thisHapVC; + } else if( !thisHapVC.hasSameAllelesAs( mergeData.firstVC) ) { + return mergeData.cantBeMerged(); + } + } + final VariantContext nextHapVC = h.getEventMap().get(nextStart); + if( nextHapVC != null && !nextHapVC.isSymbolic() ) { // something was found at 
the next location on this haplotype + if( mergeData.secondVC == null ) { + mergeData.secondVC = nextHapVC; + } else if( !nextHapVC.hasSameAllelesAs( mergeData.secondVC) ) { + return mergeData.cantBeMerged(); + } + } + } + + // don't try to merge overlapping events + if ( mergeData.firstVC != null && mergeData.secondVC != null && mergeData.firstVC.getEnd() >= mergeData.secondVC.getStart() ) + return mergeData.cantBeMerged(); + + return mergeData; + } + + // BUGBUG: make this merge function more general + protected VariantContext createMergedVariantContext( final VariantContext thisVC, final VariantContext nextVC, final byte[] ref, final GenomeLoc refLoc ) { + final int thisStart = thisVC.getStart(); + final int nextStart = nextVC.getStart(); + byte[] refBases = new byte[]{}; + byte[] altBases = new byte[]{}; + refBases = ArrayUtils.addAll(refBases, thisVC.getReference().getBases()); + altBases = ArrayUtils.addAll(altBases, thisVC.getAlternateAllele(0).getBases()); + int locus; + for( locus = thisStart + refBases.length; locus < nextStart; locus++ ) { + final byte refByte = ref[locus - refLoc.getStart()]; + refBases = ArrayUtils.add(refBases, refByte); + altBases = ArrayUtils.add(altBases, refByte); + } + refBases = ArrayUtils.addAll(refBases, ArrayUtils.subarray(nextVC.getReference().getBases(), locus > nextStart ? 
1 : 0, nextVC.getReference().getBases().length)); // special case of deletion including the padding base of consecutive indel + altBases = ArrayUtils.addAll(altBases, nextVC.getAlternateAllele(0).getBases()); + + int iii = 0; + if( refBases.length == altBases.length ) { // insertion + deletion of same length creates an MNP --> trim common prefix bases off the beginning of the allele + while( iii < refBases.length && refBases[iii] == altBases[iii] ) { iii++; } + if ( iii == refBases.length ) { + // we've become a null allele, such as with CA/C + A/AA -> CA/CA => after trimming there's nothing left + // so return a null variant context so we can eliminate the variants from consideration + return null; + } + } + + + final Allele refAllele = Allele.create( ArrayUtils.subarray(refBases, iii, refBases.length), true ); + final Allele altAllele = Allele.create( ArrayUtils.subarray(altBases, iii, altBases.length), false ); + return new VariantContextBuilder("merged", thisVC.getChr(), thisVC.getStart() + iii, nextVC.getEnd(), Arrays.asList(refAllele, altAllele)).make(); + } + + /** + * Update the event maps in all haplotypes to replace a replacement of update1 and 2 with replacement + * + * @param haplotypes the haplotypes whose event maps we need to update + * @param startPosKeySet a sorted set of start positions that we must update + * @param replacement a VariantContext to replace update1 and update2 with. 
Can be null, indicating that we just want to remove update1 and update2 + * @param update1 the first VC we want to update + * @param update2 the second VC we want to update + */ + private void replaceVariantContextsInMap(final List haplotypes, + final TreeSet startPosKeySet, + final VariantContext replacement, + final VariantContext update1, final VariantContext update2) { + // remove the old event from the eventMap on every haplotype and the start pos key set, replace with merged event + for( final Haplotype h : haplotypes ) { + // if we had both events, add replacement. In some cases the haplotype may not have both + // events but they were still merged because the haplotype isn't a particularly informative + // haplotype in any case. The order of operations here is important because we are modifying the map + final boolean shouldAdd = h.getEventMap().containsKey(update1.getStart()) && h.getEventMap().containsKey(update2.getStart()); + h.getEventMap().remove(update1.getStart()); + h.getEventMap().remove(update2.getStart()); + if ( shouldAdd && replacement != null ) { + h.getEventMap().addVC(replacement, false); // cannot merge we other events at the same position + } + } + + startPosKeySet.remove(update1.getStart()); + startPosKeySet.remove(update2.getStart()); + if ( replacement != null ) startPosKeySet.add(replacement.getStart()); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java index 9fb75463a..6a66d9845 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngineUnitTest.java @@ -279,148 +279,6 @@ public class GenotypingEngineUnitTest extends BaseTest { Assert.assertTrue(compareVCMaps(calculatedMap, expectedMap)); } - /** - * Tests 
that we get the right values from the R^2 calculation - */ - @Test - public void testCalculateR2LD() { - logger.warn("Executing testCalculateR2LD"); - - Assert.assertEquals(GenotypingEngine.calculateR2LD(1,1,1,1), 0.0, 0.00001); - Assert.assertEquals(GenotypingEngine.calculateR2LD(100,100,100,100), 0.0, 0.00001); - Assert.assertEquals(GenotypingEngine.calculateR2LD(1,0,0,1), 1.0, 0.00001); - Assert.assertEquals(GenotypingEngine.calculateR2LD(100,0,0,100), 1.0, 0.00001); - Assert.assertEquals(GenotypingEngine.calculateR2LD(1,2,3,4), (0.1 - 0.12) * (0.1 - 0.12) / (0.3 * 0.7 * 0.4 * 0.6), 0.00001); - } - - @Test - public void testCreateMergedVariantContext() { - logger.warn("Executing testCreateMergedVariantContext"); - - final byte[] ref = "AATTCCGGAATTCCGGAATT".getBytes(); - final GenomeLoc refLoc = genomeLocParser.createGenomeLoc("2", 1700, 1700 + ref.length); - - // SNP + SNP = simple MNP - VariantContext thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); - VariantContext nextVC = new VariantContextBuilder().loc("2", 1704, 1704).alleles("C","G").make(); - VariantContext truthVC = new VariantContextBuilder().loc("2", 1703, 1704).alleles("TC","GG").source("merged").make(); - VariantContext mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // SNP + ref + SNP = MNP with ref base gap - thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","G").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","GCG").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + 
mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // insertion + SNP - thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TAAAAA").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","G").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","TAAAAACG").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // SNP + insertion - thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","CAAAAA").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","GCCAAAAA").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // deletion + SNP - thisVC = new VariantContextBuilder().loc("2", 1703, 1704).alleles("TC","T").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","G").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","TG").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - 
Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // SNP + deletion - thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1706).alleles("TCCG","GCC").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // insertion + deletion = MNP - thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TA").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); - truthVC = new VariantContextBuilder().loc("2", 1704, 1706).alleles("CCG","ACC").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // insertion + deletion - thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TAAAAA").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1706).alleles("TCCG","TAAAAACC").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // insertion + insertion - thisVC = new 
VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TA").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","CA").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","TACCA").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // deletion + deletion - thisVC = new VariantContextBuilder().loc("2", 1701, 1702).alleles("AT","A").make(); - nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); - truthVC = new VariantContextBuilder().loc("2", 1701, 1706).alleles("ATTCCG","ATCC").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // deletion + insertion (abutting) - thisVC = new VariantContextBuilder().loc("2", 1701, 1702).alleles("AT","A").make(); - nextVC = new VariantContextBuilder().loc("2", 1702, 1702).alleles("T","GCGCGC").make(); - truthVC = new VariantContextBuilder().loc("2", 1701, 1702).alleles("AT","AGCGCGC").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - - // complex + complex - thisVC = new VariantContextBuilder().loc("2", 1703, 1704).alleles("TC","AAA").make(); - nextVC = new VariantContextBuilder().loc("2", 
1706, 1707).alleles("GG","AC").make(); - truthVC = new VariantContextBuilder().loc("2", 1703, 1707).alleles("TCCGG","AAACAC").source("merged").make(); - mergedVC = GenotypingEngine.createMergedVariantContext(thisVC, nextVC, ref, refLoc); - logger.warn(truthVC + " == " + mergedVC); - Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); - Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); - Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); - } - /** * Private function to compare Map of VCs, it only checks the types and start locations of the VariantContext */ diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparatorUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparatorUnitTest.java new file mode 100644 index 000000000..26384c190 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparatorUnitTest.java @@ -0,0 +1,77 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. 
DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. 
LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. 
BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. 
Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.utils.haplotype; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.Utils; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class HaplotypeBaseComparatorUnitTest extends BaseTest { + @Test + public void testComparison() { + final List rawStrings = Arrays.asList("A", "C", "AC", "CT", "GTC", "ACGT"); + final List lexStrings = new ArrayList(rawStrings); + Collections.sort(lexStrings); + + for ( final List seqs : Utils.makePermutations(lexStrings, lexStrings.size(), false) ) { + final List haps = new ArrayList(seqs.size()); + for ( final String seq : seqs ) { + haps.add(new Haplotype(seq.getBytes(), false)); + } + + Collections.sort(haps, new HaplotypeBaseComparator()); + for ( int i = 0; i < lexStrings.size(); i++ ) + Assert.assertEquals(haps.get(i).getBaseString(), lexStrings.get(i), "Failed sort " + haps + " expected " + lexStrings); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculatorUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculatorUnitTest.java new file mode 100644 index 000000000..3c3452bbf --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeLDCalculatorUnitTest.java @@ -0,0 +1,118 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
+* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. 
Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class HaplotypeLDCalculatorUnitTest extends BaseTest { + HaplotypeLDCalculator calculator; + + @BeforeMethod + public void setUp() throws Exception { + calculator = new HaplotypeLDCalculator(); + } + + /** + * Tests that we get the right values from the R^2 calculation + */ + @Test + public void computeProbOfBeingPhased() { + logger.warn("Executing testCalculateR2LD"); + + // See AA, AB, and BA in population + Assert.assertEquals(calculator.pPhasedTest(0, 0, 0, -100), 0, 0.00001); + + // See AA, AB, BB in population + Assert.assertTrue(calculator.pPhasedTest(0, 0, -100, 0) < 0.5); + + // See AA and BB in population + Assert.assertEquals(calculator.pPhasedTest(0, -100, -100, 0), 1, 0.00001); + + // See AA, AB, and BA but no BBs in population + Assert.assertEquals(calculator.pPhasedTest(0, -20, -40, Double.NEGATIVE_INFINITY), 0, 0.00001); + + // See BB, AB, and BA but no AAs in population, so BB is the best explanation + Assert.assertEquals(calculator.pPhasedTest(Double.NEGATIVE_INFINITY, -20, -40, 0), 1, 0.00001); + + // See only AB and BA but no AAs nor BBs in 
population + Assert.assertEquals(calculator.pPhasedTest(Double.NEGATIVE_INFINITY, -20, -40, Double.NEGATIVE_INFINITY), 0, 0.00001); + + // Previously bad input + Assert.assertEquals(calculator.pPhasedTest(-400, -600, -1200, Double.NEGATIVE_INFINITY), 0, 0.00001); + + // first variant is just bad, so BA and BB are both very bad, shouldn't be phased + Assert.assertEquals(calculator.pPhasedTest(0, -1000, -100, -10000), 0, 0.00001); + + // second variant is just bad, so AB and BB are both very bad, shouldn't be phased + Assert.assertEquals(calculator.pPhasedTest(0, -100, -1000, -10000), 0, 0.00001); + + // AA is very good, all all others are quite poor. Shouldn't be phased + Assert.assertEquals(calculator.pPhasedTest(0, -1000, -1000, -10000), 0, 0.00001); + + + for ( int i = -10; i > -10000; i -= 10 ) { + // only bad het states + Assert.assertTrue(calculator.pPhasedTest(0, i, i, 0) > 0.99, "Failed for " + i); + + // BB state is terrible + Assert.assertTrue(calculator.pPhasedTest(0, 0, 0, i) < 0.5, "Failed for " + i); + + // truth is AB, BA, and BB + Assert.assertTrue(calculator.pPhasedTest(i, 0, 0, 0) < 0.5, "Failed for " + i); + + // truth is AB, BA + Assert.assertTrue(calculator.pPhasedTest(i, 0, 0, i) < 0.5, "Failed for " + i); + + // Only good signal is AB, so we shouldn't be phased + Assert.assertTrue(calculator.pPhasedTest(i, i, 0, i) < 0.5, "Failed for " + i); + Assert.assertTrue(calculator.pPhasedTest(i, 0, i, i) < 0.5, "Failed for " + i); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparatorUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparatorUnitTest.java new file mode 100644 index 000000000..64a62bc02 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparatorUnitTest.java @@ -0,0 +1,76 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE 
AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.Utils; +import org.testng.Assert; +import org.testng.annotations.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class HaplotypeScoreComparatorUnitTest extends BaseTest { + @Test + public void testComparison() { + final List scores = Arrays.asList(3.0, 2.0, 1.0); + for ( final List myScores : Utils.makePermutations(scores, scores.size(), false) ) { + final List haps = new ArrayList(myScores.size()); + for ( final double score : myScores ) { + final Haplotype h = new Haplotype("ACT".getBytes(), false); + h.setScore(score); + haps.add(h); + } + + Collections.sort(haps, new HaplotypeScoreComparator()); + for ( int i = 0; i < myScores.size(); i++ ) + Assert.assertEquals(haps.get(i).getScore(), scores.get(i)); + } + } +} diff --git a/protected/java/test/org/broadinstitute/sting/utils/haplotype/LDMergerUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/haplotype/LDMergerUnitTest.java new file mode 100644 index 000000000..a2c69e535 --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/utils/haplotype/LDMergerUnitTest.java @@ -0,0 +1,334 @@ +/* +* 
By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. 
LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. +* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. 
INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. +* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import net.sf.samtools.TextCigarCodec; +import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.*; +import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; +import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.variantcontext.VariantContextBuilder; +import org.testng.Assert; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.DataProvider; +import org.testng.annotations.Test; + +import java.io.File; +import java.io.FileNotFoundException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.TreeSet; + +public class LDMergerUnitTest extends BaseTest { + LDMerger merger; + GenomeLocParser genomeLocParser; + + @BeforeClass + public void init() throws FileNotFoundException { + genomeLocParser = new GenomeLocParser(new CachingIndexedFastaSequenceFile(new File(b37KGReference))); + } + + @BeforeMethod + public void setUp() throws Exception { + merger = new LDMerger(); + } + + @Test + public void testCreateMergedVariantContext() { + logger.warn("Executing testCreateMergedVariantContext"); + + final byte[] ref = 
"AATTCCGGAATTCCGGAATT".getBytes(); + final GenomeLoc refLoc = genomeLocParser.createGenomeLoc("2", 1700, 1700 + ref.length); + + // SNP + SNP = simple MNP + VariantContext thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); + VariantContext nextVC = new VariantContextBuilder().loc("2", 1704, 1704).alleles("C","G").make(); + VariantContext truthVC = new VariantContextBuilder().loc("2", 1703, 1704).alleles("TC","GG").source("merged").make(); + VariantContext mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // SNP + ref + SNP = MNP with ref base gap + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","G").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","GCG").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // insertion + SNP + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TAAAAA").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","G").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","TAAAAACG").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + 
Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // SNP + insertion + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","CAAAAA").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","GCCAAAAA").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // deletion + SNP + thisVC = new VariantContextBuilder().loc("2", 1703, 1704).alleles("TC","T").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","G").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","TG").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // SNP + deletion + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","G").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1706).alleles("TCCG","GCC").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // insertion + deletion = MNP + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TA").make(); + 
nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); + truthVC = new VariantContextBuilder().loc("2", 1704, 1706).alleles("CCG","ACC").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // insertion + deletion + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TAAAAA").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1706).alleles("TCCG","TAAAAACC").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // insertion + insertion + thisVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("T","TA").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1705).alleles("C","CA").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1705).alleles("TCC","TACCA").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // deletion + deletion + thisVC = new VariantContextBuilder().loc("2", 1701, 1702).alleles("AT","A").make(); + nextVC = new VariantContextBuilder().loc("2", 1705, 1706).alleles("CG","C").make(); + truthVC = new VariantContextBuilder().loc("2", 1701, 
1706).alleles("ATTCCG","ATCC").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // deletion + insertion (abutting) + thisVC = new VariantContextBuilder().loc("2", 1701, 1702).alleles("AT","A").make(); + nextVC = new VariantContextBuilder().loc("2", 1702, 1702).alleles("T","GCGCGC").make(); + truthVC = new VariantContextBuilder().loc("2", 1701, 1702).alleles("AT","AGCGCGC").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + + // complex + complex + thisVC = new VariantContextBuilder().loc("2", 1703, 1704).alleles("TC","AAA").make(); + nextVC = new VariantContextBuilder().loc("2", 1706, 1707).alleles("GG","AC").make(); + truthVC = new VariantContextBuilder().loc("2", 1703, 1707).alleles("TCCGG","AAACAC").source("merged").make(); + mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + logger.warn(truthVC + " == " + mergedVC); + Assert.assertTrue(truthVC.hasSameAllelesAs(mergedVC)); + Assert.assertEquals(truthVC.getStart(), mergedVC.getStart()); + Assert.assertEquals(truthVC.getEnd(), mergedVC.getEnd()); + } + + @Test + public void testInsertionDeletionBecomingNullAllele() { + final byte[] ref = "CAAA".getBytes(); + final GenomeLoc refLoc = genomeLocParser.createGenomeLoc("2", 1700, 1700 + ref.length); + + // insertion + deletion results in a null allele, should return false + final VariantContext thisVC = new VariantContextBuilder().loc("2", 1700, 1701).alleles("CA","C").make(); + final 
VariantContext nextVC = new VariantContextBuilder().loc("2", 1703, 1703).alleles("A","AA").make(); + final VariantContext mergedVC = merger.createMergedVariantContext(thisVC, nextVC, ref, refLoc); + Assert.assertNull(mergedVC, "Insertion deletion becoming a null allele should return a null variant context"); + } + + /** + * Just returns a given R2 value for testing + */ + private static class MockLDCalculator extends HaplotypeLDCalculator { + private final double R2; + + private MockLDCalculator(double r2) { + R2 = r2; + } + + @Override + protected double computeProbOfBeingPhased(VariantContext first, VariantContext second) { + return R2; + } + } + + @DataProvider(name = "R2MergerData") + public Object[][] makeR2MergerData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final double thres = LDMerger.MERGE_EVENTS_PROB_PHASED_THRESHOLD; + for ( final double r2 : Arrays.asList(0.0, thres - 0.01, thres + 0.01, 1.0) ) { + tests.add(new Object[]{"ACGT", "CCGC", 2, "4M", "ACGT", "CCGC", r2, r2 >= thres}); + tests.add(new Object[]{"ACGT", "AGGC", 2, "4M", "CGT", "GGC", r2, r2 >= thres}); + tests.add(new Object[]{"ACGT", "ACCC", 2, "4M", "GT", "CC", r2, r2 >= thres}); + tests.add(new Object[]{"ACGT", "ACCGTT", 2, "2M1I1M1I1M", "CG", "CCGT", r2, r2 >= thres}); + tests.add(new Object[]{"ACGT", "AGCT", 2, "4M", "CG", "GC", r2, r2 >= thres}); + tests.add(new Object[]{"ACAGT", "AAGC", 2, "1M1D3M", "ACAGT", "AAGC", r2, r2 >= thres}); + tests.add(new Object[]{"ACAGT", "AAT", 2, "1M1D1M1D1M", "ACAG", "AA", r2, r2 >= thres}); + + // cannot be merged -- only 1 event + tests.add(new Object[]{"AAA", "ACA", 1, "3M", null, null, r2, false}); + + final int dist = LDMerger.MAX_DISTANCE_BETWEEN_SNPS_TO_MERGE + 2; + tests.add(new Object[]{Utils.dupString("A", dist), "C" + Utils.dupString("A", dist - 2) + "C", 2, dist + "M", null, null, r2, false}); + } + + return tests.toArray(new Object[][]{}); + } 
+ + @Test(dataProvider = "R2MergerData") + public void testR2Merger(final String refS, final String hapS, int nEvents, final String cigar, final String expectedMergedRef, final String expectedMergedAlt, final double r2, final boolean expectMerge) { + final Haplotype ref = new Haplotype(refS.getBytes(), true, 0, TextCigarCodec.getSingleton().decode(refS.length() + "M")); + final Haplotype hap = new Haplotype(hapS.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + final GenomeLoc loc = new UnvalidatingGenomeLoc("1", 0, 1, ref.length()); + + final List haplotypes = Arrays.asList(ref, hap); + final TreeSet vcStarts = EventMap.buildEventMapsForHaplotypes(haplotypes, ref.getBases(), loc, false); + final MockLDCalculator r2Calc = new MockLDCalculator(r2); + + Assert.assertEquals(vcStarts.size(), nEvents); + final boolean merged = merger.mergeConsecutiveEventsBasedOnLDOnce(haplotypes, r2Calc, 1, vcStarts, ref.getBases(), loc); + Assert.assertEquals(merged, expectMerge); + Assert.assertEquals(vcStarts.size(), expectMerge ? 
1 : nEvents); + if ( expectMerge ) { + final VariantContext vc = hap.getEventMap().getVariantContexts().iterator().next(); + Assert.assertTrue(vc.isBiallelic()); + Assert.assertEquals(vc.getReference().getDisplayString(), expectedMergedRef); + Assert.assertEquals(vc.getAlternateAllele(0).getDisplayString(), expectedMergedAlt); + } + } + + @Test + public void testR2MergerWithThirdHapWithoutEvent() { + final String refS = "ACGT"; + final String hapS = "CCGA"; + final String cigar = "4M"; + final Haplotype ref = new Haplotype(refS.getBytes(), true, 0, TextCigarCodec.getSingleton().decode(refS.length() + "M")); + final Haplotype hap1 = new Haplotype(hapS.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + final Haplotype hap2 = new Haplotype("ACGA".getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + final GenomeLoc loc = new UnvalidatingGenomeLoc("1", 0, 1, ref.length()); + + final List haplotypes = Arrays.asList(ref, hap1, hap2); + final TreeSet vcStarts = EventMap.buildEventMapsForHaplotypes(haplotypes, ref.getBases(), loc, false); + final MockLDCalculator r2Calc = new MockLDCalculator(1.0); + + Assert.assertEquals(vcStarts.size(), 2); + final boolean merged = merger.mergeConsecutiveEventsBasedOnLDOnce(haplotypes, r2Calc, 1, vcStarts, ref.getBases(), loc); + Assert.assertEquals(merged, true); + Assert.assertEquals(vcStarts.size(), 1); + + final VariantContext vc = hap1.getEventMap().getVariantContexts().iterator().next(); + Assert.assertTrue(vc.isBiallelic()); + Assert.assertEquals(vc.getReference().getDisplayString(), "ACGT"); + Assert.assertEquals(vc.getAlternateAllele(0).getDisplayString(), "CCGA"); + + Assert.assertEquals(hap2.getEventMap().size(), 0); + } + + @Test + public void testR2MergerWithMultipleAllelesAtSites() { + final String refS = "ACGT"; + final String hapS = "TCGA"; + final String cigar = "4M"; + final Haplotype ref = new Haplotype(refS.getBytes(), true, 0, TextCigarCodec.getSingleton().decode(refS.length() + 
"M")); + final Haplotype hap1 = new Haplotype(hapS.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + + final GenomeLoc loc = new UnvalidatingGenomeLoc("1", 0, 1, ref.length()); + for (final String hap2S : Arrays.asList("GCGA", "TCGG")) { + final Haplotype hap2 = new Haplotype(hap2S.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); + + final List haplotypes = Arrays.asList(ref, hap1, hap2); + final TreeSet vcStarts = EventMap.buildEventMapsForHaplotypes(haplotypes, ref.getBases(), loc, false); + final MockLDCalculator r2Calc = new MockLDCalculator(1.0); + + Assert.assertEquals(vcStarts.size(), 2); + final boolean merged = merger.mergeConsecutiveEventsBasedOnLDOnce(haplotypes, r2Calc, 1, vcStarts, ref.getBases(), loc); + Assert.assertEquals(merged, false); + Assert.assertEquals(vcStarts.size(), 2); + } + } +} \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java similarity index 58% rename from public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java rename to public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java index c32cde641..7bc6acbfe 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventExtractor.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java @@ -1,27 +1,27 @@ /* - * Copyright (c) 2012 The Broad Institute - * - * Permission is hereby granted, free of charge, to any person - * obtaining a copy of this software and associated documentation - * files (the "Software"), to deal in the Software without - * restriction, including without limitation the rights to use, - * copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following - * conditions: - * - * The above copyright 
notice and this permission notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR - * THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ package org.broadinstitute.sting.utils.haplotype; @@ -35,7 +35,6 @@ import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.sam.AlignmentUtils; -import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.variantcontext.VariantContextBuilder; @@ -49,39 +48,40 @@ import java.util.*; * Date: 3/27/13 * Time: 8:35 AM */ -public class EventExtractor extends TreeMap { - private final static Logger logger = Logger.getLogger(EventExtractor.class); - private final static boolean mergeClumpedEvents = true; +public class EventMap extends TreeMap { + private final static Logger logger = Logger.getLogger(EventMap.class); protected final static int MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION = 3; public final static Allele SYMBOLIC_UNASSEMBLED_EVENT_ALLELE = Allele.create("", false); - public EventExtractor( final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd ) { + private final Haplotype haplotype; + private final byte[] ref; + private final GenomeLoc refLoc; + private final String sourceNameToAdd; + + public EventMap(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd) { super(); + this.haplotype = haplotype; + this.ref = ref; + this.refLoc = refLoc; + this.sourceNameToAdd = sourceNameToAdd; - processCigarForInitialEvents(haplotype, ref, refLoc, sourceNameToAdd); - if ( mergeClumpedEvents && getNumberOfEvents() >= MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) { - replaceClumpedEventsWithBlockSubstititions(haplotype, ref, refLoc); - } + processCigarForInitialEvents(); } /** * For testing. 
Let's you set up a explicit configuration without having to process a haplotype and reference * @param stateForTesting */ - protected EventExtractor(final Map stateForTesting) { - super(stateForTesting); - } - - /** - * For testing. Let's you set up a explicit configuration without having to process a haplotype and reference - * @param stateForTesting - */ - protected EventExtractor(final Collection stateForTesting) { + protected EventMap(final Collection stateForTesting) { + haplotype = null; + ref = null; + refLoc = null; + sourceNameToAdd = null; for ( final VariantContext vc : stateForTesting ) addVC(vc); } - protected void processCigarForInitialEvents(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc, final String sourceNameToAdd) { + protected void processCigarForInitialEvents() { final Cigar cigar = haplotype.getCigar(); final byte[] alignment = haplotype.getBases(); @@ -172,11 +172,22 @@ public class EventExtractor extends TreeMap { } } - private void addVC(final VariantContext vc) { + /** + * Add VariantContext vc to this map, merging events with the same start sites if necessary + * @param vc the variant context to add + */ + protected void addVC(final VariantContext vc) { addVC(vc, true); } - private void addVC(final VariantContext vc, final boolean merge) { + /** + * Add VariantContext vc to this map + * @param vc the variant context to add + * @param merge should we attempt to merge it with an already existing element, or should we throw an error in that case? + */ + protected void addVC(final VariantContext vc, final boolean merge) { + if ( vc == null ) throw new IllegalArgumentException("vc cannot be null"); + if ( containsKey(vc.getStart()) ) { if ( merge ) { final VariantContext prev = get(vc.getStart()); @@ -188,20 +199,46 @@ public class EventExtractor extends TreeMap { put(vc.getStart(), vc); } - private VariantContext makeBlock(final VariantContext vc1, final VariantContext vc2) { - if ( ! 
vc1.isSNP() ) throw new IllegalArgumentException("vc1 must be a snp"); + /** + * Create a block substitution out of two variant contexts that start at the same position + * + * vc1 can be SNP, and vc2 can then be either a insertion or deletion. + * If vc1 is an indel, then vc2 must be the opposite type (vc1 deletion => vc2 must be an insertion) + * + * @param vc1 the first variant context we want to merge + * @param vc2 the second + * @return a block substitution that represents the composite substitution implied by vc1 and vc2 + */ + protected VariantContext makeBlock(final VariantContext vc1, final VariantContext vc2) { + if ( vc1.getStart() != vc2.getStart() ) throw new IllegalArgumentException("vc1 and 2 must have the same start but got " + vc1 + " and " + vc2); + if ( ! vc1.isBiallelic() ) throw new IllegalArgumentException("vc1 must be biallelic"); + if ( ! vc1.isSNP() ) { + if ( ! ((vc1.isSimpleDeletion() && vc2.isSimpleInsertion()) || (vc1.isSimpleInsertion() && vc2.isSimpleDeletion()))) + throw new IllegalArgumentException("Can only merge single insertion with deletion (or vice versa) but got " + vc1 + " merging with " + vc2); + } else if ( vc2.isSNP() ) { + throw new IllegalArgumentException("vc1 is " + vc1 + " but vc2 is a SNP, which implies there's been some terrible bug in the cigar " + vc2); + } - Allele ref, alt; + final Allele ref, alt; final VariantContextBuilder b = new VariantContextBuilder(vc1); - if ( vc1.getReference().equals(vc2.getReference()) ) { - // we've got an insertion, so we just update the alt to have the prev alt - ref = vc1.getReference(); - alt = Allele.create(vc1.getAlternateAllele(0).getDisplayString() + vc2.getAlternateAllele(0).getDisplayString().substring(1), false); + if ( vc1.isSNP() ) { + // we have to repair the first base, so SNP case is special cased + if ( vc1.getReference().equals(vc2.getReference()) ) { + // we've got an insertion, so we just update the alt to have the prev alt + ref = vc1.getReference(); + alt = 
Allele.create(vc1.getAlternateAllele(0).getDisplayString() + vc2.getAlternateAllele(0).getDisplayString().substring(1), false); + } else { + // we're dealing with a deletion, so we patch the ref + ref = vc2.getReference(); + alt = vc1.getAlternateAllele(0); + b.stop(vc2.getEnd()); + } } else { - // we're dealing with a deletion, so we patch the ref - ref = vc2.getReference(); - alt = vc1.getAlternateAllele(0); - b.stop(vc2.getEnd()); + final VariantContext insertion = vc1.isSimpleInsertion() ? vc1 : vc2; + final VariantContext deletion = vc1.isSimpleInsertion() ? vc2 : vc1; + ref = deletion.getReference(); + alt = insertion.getAlternateAllele(0); + b.stop(deletion.getEnd()); } return b.alleles(Arrays.asList(ref, alt)).make(); @@ -209,24 +246,26 @@ public class EventExtractor extends TreeMap { // TODO -- warning this is an O(N^3) algorithm because I'm just lazy. If it's valuable we need to reengineer it @Requires("getNumberOfEvents() > 0") - protected void replaceClumpedEventsWithBlockSubstititions(final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc) { - int lastStart = -1; - for ( boolean foundOne = true; foundOne; ) { - foundOne = false; - for ( final VariantContext vc : getVariantContexts() ) { - if ( vc.getStart() > lastStart ) { - lastStart = vc.getStart(); - final List neighborhood = getNeighborhood(vc, 10); - if ( updateToBlockSubstitutionIfBetter(neighborhood, haplotype, ref, refLoc) ) { - foundOne = true; - break; + protected void replaceClumpedEventsWithBlockSubstititions() { + if ( getNumberOfEvents() >= MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) { + int lastStart = -1; + for ( boolean foundOne = true; foundOne; ) { + foundOne = false; + for ( final VariantContext vc : getVariantContexts() ) { + if ( vc.getStart() > lastStart ) { + lastStart = vc.getStart(); + final List neighborhood = getNeighborhood(vc, 10); + if ( updateToBlockSubstitutionIfBetter(neighborhood) ) { + foundOne = true; + break; + } } } } } } - protected 
boolean updateToBlockSubstitutionIfBetter(final List neighbors, final Haplotype haplotype, final byte[] ref, final GenomeLoc refLoc) { + protected boolean updateToBlockSubstitutionIfBetter(final List neighbors) { if (neighbors.size() < MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION) return false; // TODO -- need more tests to decide if this is really so good @@ -284,24 +323,70 @@ public class EventExtractor extends TreeMap { return neighbors; } + /** + * Get the starting positions of events in this event map + * @return + */ public Set getStartPositions() { return keySet(); } + /** + * Get the variant contexts in order of start position in this event map + * @return + */ public Collection getVariantContexts() { return values(); } + /** + * How many events do we have? + * @return + */ public int getNumberOfEvents() { return size(); } @Override public String toString() { - final StringBuilder b = new StringBuilder("EventExtractor{"); + final StringBuilder b = new StringBuilder("EventMap{"); for ( final VariantContext vc : getVariantContexts() ) b.append(String.format("%s:%d-%d %s,", vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles())); b.append("}"); return b.toString(); } + + /** + * Build event maps for each haplotype, returning the sorted set of all of the starting positions of all + * events across all haplotypes + * + * @param haplotypes a list of haplotypes + * @param ref the reference bases + * @param refLoc the span of the reference bases + * @param debug if true, we'll emit debugging information during this operation + * @return a sorted set of start positions of all events among all haplotypes + */ + public static TreeSet buildEventMapsForHaplotypes( final List haplotypes, + final byte[] ref, + final GenomeLoc refLoc, + final boolean debug) { + // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file + final TreeSet startPosKeySet = new TreeSet(); + int hapNumber = 0; + + if( debug ) 
logger.info("=== Best Haplotypes ==="); + for( final Haplotype h : haplotypes ) { + // Walk along the alignment and turn any difference from the reference into an event + h.setEventMap( new EventMap( h, ref, refLoc, "HC" + hapNumber++ ) ); + startPosKeySet.addAll(h.getEventMap().getStartPositions()); + + if( debug ) { + logger.info(h.toString()); + logger.info("> Cigar = " + h.getCigar()); + logger.info(">> Events = " + h.getEventMap()); + } + } + + return startPosKeySet; + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index a94c08198..081fd14e0 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -37,15 +37,13 @@ import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.ReadUtils; import org.broadinstitute.variant.variantcontext.Allele; -import java.io.Serializable; import java.util.Arrays; -import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; public class Haplotype extends Allele { private GenomeLoc genomeLocation = null; - private EventExtractor eventMap = null; + private EventMap eventMap = null; private Cigar cigar; private int alignmentStartHapwrtRef; private Event artificialEvent = null; @@ -115,11 +113,11 @@ public class Haplotype extends Allele { return Arrays.hashCode(getBases()); } - public EventExtractor getEventMap() { + public EventMap getEventMap() { return eventMap; } - public void setEventMap( final EventExtractor eventMap ) { + public void setEventMap( final EventMap eventMap ) { this.eventMap = eventMap; } @@ -219,25 +217,6 @@ public class Haplotype extends Allele { return new Haplotype(newHaplotypeBases, new Event(refAllele, altAllele, genomicInsertLocation)); } - public static class HaplotypeBaseComparator implements Comparator, Serializable { - 
@Override - public int compare( final Haplotype hap1, final Haplotype hap2 ) { - return compareHaplotypeBases(hap1, hap2); - } - - public static int compareHaplotypeBases(final Haplotype hap1, final Haplotype hap2) { - final byte[] arr1 = hap1.getBases(); - final byte[] arr2 = hap2.getBases(); - // compares byte arrays using lexical ordering - final int len = Math.min(arr1.length, arr2.length); - for( int iii = 0; iii < len; iii++ ) { - final int cmp = arr1[iii] - arr2[iii]; - if (cmp != 0) { return cmp; } - } - return arr2.length - arr1.length; - } - } - public static LinkedHashMap makeHaplotypeListFromAlleles(final List alleleList, final int startPos, final ReferenceContext ref, @@ -316,15 +295,4 @@ public class Haplotype extends Allele { public void setScore(double score) { this.score = this.isReference() ? Double.MAX_VALUE : score; } - - /** - * A comparator that sorts haplotypes in decreasing order of score, so that the best supported - * haplotypes are at the top - */ - public static class ScoreComparator implements Comparator { - @Override - public int compare(Haplotype o1, Haplotype o2) { - return -1 * Double.valueOf(o1.getScore()).compareTo(o2.getScore()); - } - } } diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparator.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparator.java new file mode 100644 index 000000000..191442e3e --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeBaseComparator.java @@ -0,0 +1,42 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is 
furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import java.util.Comparator; + +/** + * Compares two haplotypes in the lexicographic order of their bases + * + * User: depristo + * Date: 3/29/13 + * Time: 11:09 AM + */ +public class HaplotypeBaseComparator implements Comparator { + @Override + public int compare( final Haplotype hap1, final Haplotype hap2 ) { + return hap1.getBaseString().compareTo(hap2.getBaseString()); + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparator.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparator.java new file mode 100644 index 000000000..40146ba88 --- /dev/null +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/HaplotypeScoreComparator.java @@ -0,0 +1,39 @@ +/* +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* 
+* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. +*/ + +package org.broadinstitute.sting.utils.haplotype; + +import java.util.Comparator; + +/** + * A comparator that sorts haplotypes in decreasing order of score, so that the best supported + * haplotypes are at the top + */ +public class HaplotypeScoreComparator implements Comparator { + @Override + public int compare(Haplotype o1, Haplotype o2) { + return -1 * Double.valueOf(o1.getScore()).compareTo(o2.getScore()); + } +} diff --git a/public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/haplotype/EventMapUnitTest.java similarity index 61% rename from public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java rename to public/java/test/org/broadinstitute/sting/utils/haplotype/EventMapUnitTest.java index 480f82a46..d0b418b96 100644 --- a/public/java/test/org/broadinstitute/sting/utils/haplotype/EventExtractorUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/haplotype/EventMapUnitTest.java @@ -1,27 +1,27 @@ /* - * Copyright (c) 2012 The Broad Institute - * - * Permission is hereby granted, free of charge, to any person - * obtaining a copy of this software and associated documentation - * files (the "Software"), to deal in the Software without - * restriction, including without limitation the rights to use, - * copy, modify, 
merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following - * conditions: - * - * The above copyright notice and this permission notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR - * THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ +* Copyright (c) 2012 The Broad Institute +* +* Permission is hereby granted, free of charge, to any person +* obtaining a copy of this software and associated documentation +* files (the "Software"), to deal in the Software without +* restriction, including without limitation the rights to use, +* copy, modify, merge, publish, distribute, sublicense, and/or sell +* copies of the Software, and to permit persons to whom the +* Software is furnished to do so, subject to the following +* conditions: +* +* The above copyright notice and this permission notice shall be +* included in all copies or substantial portions of the Software. +* +* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR +* THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+*/ package org.broadinstitute.sting.utils.haplotype; @@ -31,16 +31,14 @@ import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.UnvalidatingGenomeLoc; import org.broadinstitute.sting.utils.Utils; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; -import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.variant.variantcontext.VariantContext; -import org.broadinstitute.variant.variantcontext.VariantContextBuilder; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.util.*; -public class EventExtractorUnitTest extends BaseTest { +public class EventMapUnitTest extends BaseTest { private final static String CHR = "20"; private final static String NAME = "foo"; @@ -71,9 +69,9 @@ public class EventExtractorUnitTest extends BaseTest { vcs.add(vc); } - tests.add(new Object[]{new EventExtractor(new LinkedList(allVCS)), Collections.emptyList()}); + tests.add(new Object[]{new EventMap(new LinkedList(allVCS)), Collections.emptyList()}); allVCS.addAll(vcs); - tests.add(new Object[]{new EventExtractor(allVCS), vcs}); + tests.add(new Object[]{new EventMap(allVCS), vcs}); } } } @@ -86,12 +84,12 @@ public class EventExtractorUnitTest extends BaseTest { /** * Example testng test using MyDataProvider */ - @Test(dataProvider = "MyDataProvider", enabled = true) // TODO == reenable - public void testGetNeighborhood(final EventExtractor eventExtractor, final List expectedNeighbors) { + @Test(dataProvider = "MyDataProvider", enabled = true) + public void testGetNeighborhood(final EventMap eventMap, final List expectedNeighbors) { final VariantContext leftOfNeighors = expectedNeighbors.isEmpty() ? 
null : expectedNeighbors.get(0); - for ( final VariantContext vc : eventExtractor.getVariantContexts() ) { - final List n = eventExtractor.getNeighborhood(vc, 5); + for ( final VariantContext vc : eventMap.getVariantContexts() ) { + final List n = eventMap.getNeighborhood(vc, 5); if ( leftOfNeighors == vc ) Assert.assertEquals(n, expectedNeighbors); else if ( ! expectedNeighbors.contains(vc) ) @@ -103,7 +101,7 @@ public class EventExtractorUnitTest extends BaseTest { public Object[][] makeBlockSubstitutionsData() { List tests = new ArrayList(); - for ( int size = EventExtractor.MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION; size < 10; size++ ) { + for ( int size = EventMap.MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION; size < 10; size++ ) { final String ref = Utils.dupString("A", size); final String alt = Utils.dupString("C", size); tests.add(new Object[]{ref, alt, size + "M", GATKVariantContextUtils.makeFromAlleles(NAME, CHR, 1, Arrays.asList(ref, alt))}); @@ -131,7 +129,8 @@ public class EventExtractorUnitTest extends BaseTest { public void testBlockSubstitutionsData(final String refBases, final String haplotypeBases, final String cigar, final VariantContext expectedBlock) { final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length()); - final EventExtractor ee = new EventExtractor(hap, refBases.getBytes(), loc, NAME); + final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME); + ee.replaceClumpedEventsWithBlockSubstititions(); Assert.assertEquals(ee.getNumberOfEvents(), 1); final VariantContext actual = ee.getVariantContexts().iterator().next(); Assert.assertTrue(GATKVariantContextUtils.equalSites(actual, expectedBlock), "Failed with " + actual); @@ -142,11 +141,11 @@ public class EventExtractorUnitTest extends BaseTest { List tests = new ArrayList(); tests.add(new Object[]{"TT", "GCT", "1M1I1M", 
Arrays.asList(Arrays.asList("T", "GC"))}); - tests.add(new Object[]{"GCT", "TT", "1M1D", Arrays.asList(Arrays.asList("GC", "T"))}); + tests.add(new Object[]{"GCT", "TT", "1M1D1M", Arrays.asList(Arrays.asList("GC", "T"))}); tests.add(new Object[]{"TT", "GCCT", "1M2I1M", Arrays.asList(Arrays.asList("T", "GCC"))}); - tests.add(new Object[]{"GCCT", "TT", "1M2D", Arrays.asList(Arrays.asList("GCC", "T"))}); - tests.add(new Object[]{"AAGCCT", "AATT", "3M2D", Arrays.asList(Arrays.asList("GCC", "T"))}); - tests.add(new Object[]{"AAGCCT", "GATT", "3M2D", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("GCC", "T"))}); + tests.add(new Object[]{"GCCT", "TT", "1M2D1M", Arrays.asList(Arrays.asList("GCC", "T"))}); + tests.add(new Object[]{"AAGCCT", "AATT", "3M2D1M", Arrays.asList(Arrays.asList("GCC", "T"))}); + tests.add(new Object[]{"AAGCCT", "GATT", "3M2D1M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("GCC", "T"))}); tests.add(new Object[]{"AAAAA", "AGACA", "5M", Arrays.asList(Arrays.asList("A", "G"), Arrays.asList("A", "C"))}); return tests.toArray(new Object[][]{}); @@ -155,11 +154,12 @@ public class EventExtractorUnitTest extends BaseTest { /** * Example testng test using MyDataProvider */ - @Test(dataProvider = "AdjacentSNPIndelTest", enabled = true) + @Test(dataProvider = "AdjacentSNPIndelTest") public void testAdjacentSNPIndelTest(final String refBases, final String haplotypeBases, final String cigar, final List> expectedAlleles) { final Haplotype hap = new Haplotype(haplotypeBases.getBytes(), false, 0, TextCigarCodec.getSingleton().decode(cigar)); final GenomeLoc loc = new UnvalidatingGenomeLoc(CHR, 0, 1, refBases.length()); - final EventExtractor ee = new EventExtractor(hap, refBases.getBytes(), loc, NAME); + final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME); + ee.replaceClumpedEventsWithBlockSubstititions(); Assert.assertEquals(ee.getNumberOfEvents(), expectedAlleles.size()); final List actuals = new ArrayList(ee.getVariantContexts()); 
for ( int i = 0; i < ee.getNumberOfEvents(); i++ ) { @@ -168,4 +168,36 @@ public class EventExtractorUnitTest extends BaseTest { Assert.assertEquals(actual.getAlternateAllele(0).getDisplayString(), expectedAlleles.get(i).get(1)); } } + + @DataProvider(name = "MakeBlockData") + public Object[][] makeMakeBlockData() { + List tests = new ArrayList(); + + tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("AGT", "A"), Arrays.asList("AGT", "G")}); + tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("A", "AGT"), Arrays.asList("A", "GGT")}); + + tests.add(new Object[]{Arrays.asList("AC", "A"), Arrays.asList("A", "AGT"), Arrays.asList("AC", "AGT")}); + tests.add(new Object[]{Arrays.asList("ACGTA", "A"), Arrays.asList("A", "AG"), Arrays.asList("ACGTA", "AG")}); + tests.add(new Object[]{Arrays.asList("AC", "A"), Arrays.asList("A", "AGCGT"), Arrays.asList("AC", "AGCGT")}); + tests.add(new Object[]{Arrays.asList("A", "ACGTA"), Arrays.asList("AG", "A"), Arrays.asList("AG", "ACGTA")}); + tests.add(new Object[]{Arrays.asList("A", "AC"), Arrays.asList("AGCGT", "A"), Arrays.asList("AGCGT", "AC")}); + + return tests.toArray(new Object[][]{}); + } + + /** + * Example testng test using MyDataProvider + */ + @Test(dataProvider = "MakeBlockData", enabled = true) + public void testGetNeighborhood(final List firstAlleles, final List secondAlleles, final List expectedAlleles) { + final VariantContext vc1 = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, firstAlleles); + final VariantContext vc2 = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, secondAlleles); + final VariantContext expected = GATKVariantContextUtils.makeFromAlleles("x", "20", 10, expectedAlleles); + + final EventMap eventMap = new EventMap(Collections.emptyList()); + final VariantContext block = eventMap.makeBlock(vc1, vc2); + + Assert.assertEquals(block.getStart(), expected.getStart()); + Assert.assertEquals(block.getAlleles(), expected.getAlleles()); + } } From 
7105ad65a6ab37a675d74cd468316f122cb3c40c Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Tue, 2 Apr 2013 15:57:45 -0400 Subject: [PATCH 08/23] Remove the capability of EventMap to emit symbolic alleles for unassembled events -- These events always occur on the very edge of the haplotypes, and are intrinsically dodgy. So instead of emitting them and then potentially having to deal with merging real basepair events into them we just no longer emit those events. --- .../org/broadinstitute/sting/utils/haplotype/EventMap.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java index 7bc6acbfe..1d33e328d 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java @@ -105,8 +105,9 @@ public class EventMap extends TreeMap { if( BaseUtils.isRegularBase(refByte) ) { insertionAlleles.add( Allele.create(refByte, true) ); } - if( cigarIndex == 0 || cigarIndex == cigar.getCigarElements().size() - 1 ) { // if the insertion isn't completely resolved in the haplotype then make it a symbolic allele - insertionAlleles.add( SYMBOLIC_UNASSEMBLED_EVENT_ALLELE ); + if( cigarIndex == 0 || cigarIndex == cigar.getCigarElements().size() - 1 ) { + // if the insertion isn't completely resolved in the haplotype, skip it + // note this used to emit SYMBOLIC_UNASSEMBLED_EVENT_ALLELE but that seems dangerous } else { byte[] insertionBases = new byte[]{}; insertionBases = ArrayUtils.add(insertionBases, ref[refPos - 1]); // add the padding base From 2aac9e2782aaac2aaf60e06ca6734415c8d06743 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Sun, 31 Mar 2013 14:40:14 -0400 Subject: [PATCH 09/23] More efficient ZipLinearChains algorithm -- Goes through the graph looking for chains to zip, accumulates the vertices of the chains, and then 
finally go through and updates the graph in one big go. Vastly more efficient than the previous version, but unfortunately doesn't actually work now -- Also incorporate edge weight propagation into SeqGraph zipLinearChains. The edge weights for all incoming and outgoing edges are now their previous value, plus the sum of the internal chain edges / n such edges --- .../haplotypecaller/graphs/SeqGraph.java | 208 +++++++++++++----- .../graphs/SeqGraphUnitTest.java | 177 ++++++++++++++- 2 files changed, 328 insertions(+), 57 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index 400b5c7ee..d08c2f211 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -46,10 +46,13 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; -import org.apache.commons.lang.ArrayUtils; +import com.google.java.contract.Ensures; +import com.google.java.contract.Requires; import java.io.File; import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; import java.util.Set; /** @@ -58,7 +61,7 @@ import java.util.Set; * @author: depristo * @since 03/2013 */ -public class SeqGraph extends BaseGraph { +public final class SeqGraph extends BaseGraph { private final static boolean PRINT_SIMPLIFY_GRAPHS = false; private final static int MIN_SUFFIX_TO_MERGE_TAILS = 5; @@ -118,18 +121,8 @@ public class SeqGraph extends BaseGraph { /** * Zip up all of the simple linear chains present in this graph. 
- */ - public boolean zipLinearChains() { - boolean foundOne = false; - while( zipOneLinearChain() ) { - // just keep going until zipOneLinearChain says its done - foundOne = true; - } - return foundOne; - } - - /** - * Merge together two vertices in the graph v1 -> v2 into a single vertex v' containing v1 + v2 sequence + * + * Merges together all pairs of vertices in the graph v1 -> v2 into a single vertex v' containing v1 + v2 sequence * * Only works on vertices where v1's only outgoing edge is to v2 and v2's only incoming edge is from v1. * @@ -137,44 +130,153 @@ public class SeqGraph extends BaseGraph { * * @return true if any such pair of vertices could be found, false otherwise */ - protected boolean zipOneLinearChain() { - for( final BaseEdge e : edgeSet() ) { - final SeqVertex outgoingVertex = getEdgeTarget(e); - final SeqVertex incomingVertex = getEdgeSource(e); - if( !outgoingVertex.equals(incomingVertex) - && outDegreeOf(incomingVertex) == 1 && inDegreeOf(outgoingVertex) == 1 - && isReferenceNode(incomingVertex) == isReferenceNode(outgoingVertex) ) { - - final Set outEdges = outgoingEdgesOf(outgoingVertex); - final Set inEdges = incomingEdgesOf(incomingVertex); - final BaseEdge singleOutEdge = outEdges.isEmpty() ? null : outEdges.iterator().next(); - final BaseEdge singleInEdge = inEdges.isEmpty() ? 
null : inEdges.iterator().next(); - - if( inEdges.size() == 1 && outEdges.size() == 1 ) { - singleInEdge.setMultiplicity( singleInEdge.getMultiplicity() + ( e.getMultiplicity() / 2 ) ); - singleOutEdge.setMultiplicity( singleOutEdge.getMultiplicity() + ( e.getMultiplicity() / 2 ) ); - } else if( inEdges.size() == 1 ) { - singleInEdge.setMultiplicity( Math.max(singleInEdge.getMultiplicity() + ( e.getMultiplicity() - 1 ), 0) ); - } else if( outEdges.size() == 1 ) { - singleOutEdge.setMultiplicity( Math.max( singleOutEdge.getMultiplicity() + ( e.getMultiplicity() - 1 ), 0) ); - } - - final SeqVertex addedVertex = new SeqVertex( ArrayUtils.addAll(incomingVertex.getSequence(), outgoingVertex.getSequence()) ); - addVertex(addedVertex); - for( final BaseEdge edge : outEdges ) { - addEdge(addedVertex, getEdgeTarget(edge), new BaseEdge(edge.isRef(), edge.getMultiplicity())); - } - for( final BaseEdge edge : inEdges ) { - addEdge(getEdgeSource(edge), addedVertex, new BaseEdge(edge.isRef(), edge.getMultiplicity())); - } - - removeVertex(incomingVertex); - removeVertex(outgoingVertex); - return true; - } + public boolean zipLinearChains() { + // create the list of start sites [doesn't modify graph yet] + final List zipStarts = new LinkedList(); + for ( final SeqVertex source : vertexSet() ) { + if ( isLinearChainStart(source) ) + zipStarts.add(source); } - return false; + if ( zipStarts.isEmpty() ) // nothing to do, as nothing could start a chain + return false; + + // At this point, zipStarts contains all of the vertices in this graph that might start some linear + // chain of vertices. 
We walk through each start, building up the linear chain of vertices and then + // zipping them up with mergeLinearChain, if possible + boolean mergedOne = false; + for ( final SeqVertex zipStart : zipStarts ) { + final LinkedList linearChain = traceLinearChain(zipStart); + + // merge the linearized chain, recording if we actually did some useful work + mergedOne |= mergeLinearChain(linearChain); + } + + return mergedOne; + } + + /** + * Is source vertex potentially a start of a linear chain of vertices? + * + * We are a start of a zip chain if our out degree is 1 and either the + * the vertex has no incoming connections or 2 or more (we must start a chain) or + * we have exactly one incoming vertex and that one has out-degree > 1 (i.e., source's incoming + * vertex couldn't be a start itself + * + * @param source a non-null vertex + * @return true if source might start a linear chain + */ + @Requires("source != null") + private boolean isLinearChainStart(final SeqVertex source) { + return outDegreeOf(source) == 1 + && ( inDegreeOf(source) != 1 + || outDegreeOf(incomingVerticesOf(source).iterator().next()) > 1 ); + } + + /** + * Get all of the vertices in a linear chain of vertices starting at zipStart + * + * Build a list of vertices (in order) starting from zipStart such that each sequential pair of vertices + * in the chain A and B can be zipped together. + * + * @param zipStart a vertex that starts a linear chain + * @return a list of vertices that comprise a linear chain starting with zipStart. The resulting + * list will always contain at least zipStart as the first element. 
+ */ + @Requires("isLinearChainStart(zipStart)") + @Ensures({"result != null", "result.size() >= 1"}) + private LinkedList traceLinearChain(final SeqVertex zipStart) { + final LinkedList linearChain = new LinkedList(); + linearChain.add(zipStart); + + boolean lastIsRef = isReferenceNode(zipStart); // remember because this calculation is expensive + SeqVertex last = zipStart; + while (true) { + if ( outDegreeOf(last) != 1 ) + // cannot extend a chain from last if last has multiple outgoing branches + break; + + // there can only be one (outgoing edge of last) by contract + final SeqVertex target = getEdgeTarget(outgoingEdgeOf(last)); + + if ( inDegreeOf(target) != 1 || last.equals(target) ) + // cannot zip up a target that has multiple incoming nodes or that's a cycle to the last node + break; + + final boolean targetIsRef = isReferenceNode(target); + if ( lastIsRef != targetIsRef ) // both our isRef states must be equal + break; + + linearChain.add(target); // extend our chain by one + + // update our last state to be the current state, and continue + last = target; + lastIsRef = targetIsRef; + } + + return linearChain; + } + + /** + * Merge a linear chain of vertices into a single combined vertex, and update this graph to such that + * the incoming edges into the first element of the linearChain and the outgoing edges from linearChain.getLast() + * all point to this new combined vertex. 
+ * + * @param linearChain a non-empty chain of vertices that can be zipped up into a single vertex + * @return true if we actually merged at least two vertices together + */ + protected boolean mergeLinearChain(final LinkedList linearChain) { + if ( linearChain.isEmpty() ) throw new IllegalArgumentException("BUG: cannot have linear chain with 0 elements but got " + linearChain); + + final SeqVertex first = linearChain.getFirst(); + final SeqVertex last = linearChain.getLast(); + + if ( first == last ) return false; // only one element in the chain, cannot be extended + + // create the combined vertex, and add it to the graph + // TODO -- performance problem -- can be optimized if we want + final List seqs = new LinkedList(); + for ( SeqVertex v : linearChain ) seqs.add(v.getSequence()); + final byte[] seqsCat = org.broadinstitute.sting.utils.Utils.concat(seqs.toArray(new byte[][]{})); + final SeqVertex addedVertex = new SeqVertex( seqsCat ); + addVertex(addedVertex); + + final Set inEdges = incomingEdgesOf(first); + final Set outEdges = outgoingEdgesOf(last); + + final int nEdges = inEdges.size() + outEdges.size(); + int sharedWeightAmongEdges = nEdges == 0 ? 
0 : sumEdgeWeightAlongChain(linearChain) / nEdges; + final BaseEdge inc = new BaseEdge(false, sharedWeightAmongEdges); // template to make .add function call easy + + // update the incoming and outgoing edges to point to the new vertex + for( final BaseEdge edge : outEdges ) { addEdge(addedVertex, getEdgeTarget(edge), new BaseEdge(edge).add(inc)); } + for( final BaseEdge edge : inEdges ) { addEdge(getEdgeSource(edge), addedVertex, new BaseEdge(edge).add(inc)); } + + removeAllVertices(linearChain); + return true; + } + + /** + * Get the sum of the edge weights on a linear chain of at least 2 elements + * + * @param chain a linear chain of vertices with at least 2 vertices + * @return the sum of the multiplicities along all edges connecting vertices within the chain + */ + @Requires({"chain != null", "chain.size() >= 2"}) + private int sumEdgeWeightAlongChain(final LinkedList chain) { + int sum = 0; + SeqVertex prev = null; + + for ( final SeqVertex v : chain ) { + if ( prev != null ) { + final BaseEdge e = getEdge(prev, v); + if ( e == null ) throw new IllegalStateException("Something wrong with the linear chain, got a null edge between " + prev + " and " + v); + sum += e.getMultiplicity(); + } + prev = v; + } + + return sum; } /** @@ -241,7 +343,7 @@ public class SeqGraph extends BaseGraph { protected class MergeDiamonds extends VertexBasedTransformer { @Override protected boolean tryToTransform(final SeqVertex top) { - final Set middles = outgoingVerticesOf(top); + final List middles = outgoingVerticesOf(top); if ( middles.size() <= 1 ) // we can only merge if there's at least two middle nodes return false; @@ -295,7 +397,7 @@ public class SeqGraph extends BaseGraph { protected class MergeTails extends VertexBasedTransformer { @Override protected boolean tryToTransform(final SeqVertex top) { - final Set tails = outgoingVerticesOf(top); + final List tails = outgoingVerticesOf(top); if ( tails.size() <= 1 ) return false; @@ -379,7 +481,7 @@ public class SeqGraph 
extends BaseGraph { protected class MergeHeadlessIncomingSources extends VertexBasedTransformer { @Override boolean tryToTransform(final SeqVertex bottom) { - final Set incoming = incomingVerticesOf(bottom); + final List incoming = incomingVerticesOf(bottom); if ( incoming.size() <= 1 ) return false; diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java index cbd7b1063..698b83199 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java @@ -51,11 +51,15 @@ import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; +import java.io.File; import java.util.ArrayList; import java.util.Arrays; +import java.util.LinkedList; import java.util.List; public class SeqGraphUnitTest extends BaseTest { + private final static boolean DEBUG = true; + private class MergeNodesWithNoVariationTestProvider extends TestDataProvider { public byte[] sequence; public int KMER_LENGTH; @@ -98,7 +102,7 @@ public class SeqGraphUnitTest extends BaseTest { return MergeNodesWithNoVariationTestProvider.getTests(MergeNodesWithNoVariationTestProvider.class); } - @Test(dataProvider = "MergeNodesWithNoVariationTestProvider", enabled = true) + @Test(dataProvider = "MergeNodesWithNoVariationTestProvider", enabled = !DEBUG) public void testMergeNodesWithNoVariation(MergeNodesWithNoVariationTestProvider cfg) { logger.warn(String.format("Test: %s", cfg.toString())); @@ -178,7 +182,7 @@ public class SeqGraphUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "IsDiamondData", enabled = true) + @Test(dataProvider = "IsDiamondData", enabled = !DEBUG) public void testIsDiamond(final SeqGraph graph, 
final SeqVertex v, final boolean isRootOfDiamond) { final SeqGraph.MergeDiamonds merger = graph.new MergeDiamonds(); merger.setDontModifyGraphEvenIfPossible(); @@ -311,7 +315,7 @@ public class SeqGraphUnitTest extends BaseTest { return tests.toArray(new Object[][]{}); } - @Test(dataProvider = "MergingData", enabled = true) + @Test(dataProvider = "MergingData", enabled = !DEBUG) public void testMerging(final SeqGraph graph, final SeqGraph expected) { final SeqGraph merged = (SeqGraph)graph.clone(); merged.simplifyGraph(1); @@ -333,7 +337,7 @@ public class SeqGraphUnitTest extends BaseTest { // // Should become A -> ACT -> C [ref and non-ref edges] // - @Test + @Test(enabled = !DEBUG) public void testBubbleSameBasesWithRef() { final SeqGraph graph = new SeqGraph(); final SeqVertex top = new SeqVertex("A"); @@ -351,4 +355,169 @@ public class SeqGraphUnitTest extends BaseTest { actual.simplifyGraph(); Assert.assertTrue(BaseGraph.graphEquals(actual, expected), "Wrong merging result after complete merging"); } + + @DataProvider(name = "LinearZipData") + public Object[][] makeLinearZipData() throws Exception { + List tests = new ArrayList(); + + SeqGraph graph = new SeqGraph(); + SeqGraph expected = new SeqGraph(); + + // empty graph => empty graph + tests.add(new Object[]{graph.clone(), expected.clone()}); + + SeqVertex a1 = new SeqVertex("A"); + SeqVertex c1 = new SeqVertex("C"); + SeqVertex ac1 = new SeqVertex("AC"); + + // just a single vertex + graph.addVertices(a1, c1); + expected.addVertices(a1, c1); + + tests.add(new Object[]{graph.clone(), expected.clone()}); + + graph.addEdges(a1, c1); + expected = new SeqGraph(); + expected.addVertices(ac1); + tests.add(new Object[]{graph.clone(), expected.clone()}); + + // three long chain merged corrected + SeqVertex g1 = new SeqVertex("G"); + graph.addVertices(g1); + graph.addEdges(c1, g1); + expected = new SeqGraph(); + expected.addVertex(new SeqVertex("ACG")); + tests.add(new Object[]{graph.clone(), expected.clone()}); + + 
// adding something that isn't connected isn't a problem + SeqVertex t1 = new SeqVertex("T"); + graph.addVertices(t1); + expected = new SeqGraph(); + expected.addVertices(new SeqVertex("ACG"), new SeqVertex("T")); + tests.add(new Object[]{graph.clone(), expected.clone()}); + + // splitting chain with branch produces the correct zipped subgraphs + final SeqVertex a2 = new SeqVertex("A"); + final SeqVertex c2 = new SeqVertex("C"); + graph = new SeqGraph(); + graph.addVertices(a1, c1, g1, t1, a2, c2); + graph.addEdges(a1, c1, g1, t1, a2); + graph.addEdges(g1, c2); + expected = new SeqGraph(); + SeqVertex acg = new SeqVertex("ACG"); + SeqVertex ta = new SeqVertex("TA"); + expected.addVertices(acg, ta, c2); + expected.addEdges(acg, ta); + expected.addEdges(acg, c2); + tests.add(new Object[]{graph.clone(), expected.clone()}); + + // Can merge chains with loops in them + { + graph = new SeqGraph(); + graph.addVertices(a1, c1, g1); + graph.addEdges(a1, c1, g1); + graph.addEdges(a1, a1); + expected = new SeqGraph(); + + SeqVertex ac = new SeqVertex("AC"); + SeqVertex cg = new SeqVertex("CG"); + + expected.addVertices(a1, cg); + expected.addEdges(a1, cg); + expected.addEdges(a1, a1); + tests.add(new Object[]{graph.clone(), expected.clone()}); + + graph.removeEdge(a1, a1); + graph.addEdges(c1, c1); + tests.add(new Object[]{graph.clone(), graph.clone()}); + + graph.removeEdge(c1, c1); + graph.addEdges(g1, g1); + expected = new SeqGraph(); + expected.addVertices(ac, g1); + expected.addEdges(ac, g1, g1); + tests.add(new Object[]{graph.clone(), expected.clone()}); + } + + // check building n element long chains + { + final List bases = Arrays.asList("A", "C", "G", "T", "TT", "GG", "CC", "AA"); + for ( final int len : Arrays.asList(1, 2, 10, 100, 1000)) { + graph = new SeqGraph(); + expected = new SeqGraph(); + SeqVertex last = null; + String expectedBases = ""; + for ( int i = 0; i < len; i++ ) { + final String seq = bases.get(i % bases.size()); + expectedBases += seq; + 
SeqVertex a = new SeqVertex(seq); + graph.addVertex(a); + if ( last != null ) graph.addEdge(last, a); + last = a; + } + expected.addVertex(new SeqVertex(expectedBases)); + tests.add(new Object[]{graph.clone(), expected.clone()}); + } + } + + // check that edge connections are properly maintained + { + int edgeWeight = 1; + for ( final int nIncoming : Arrays.asList(0, 2, 5, 10) ) { + for ( final int nOutgoing : Arrays.asList(0, 2, 5, 10) ) { + graph = new SeqGraph(); + expected = new SeqGraph(); + + graph.addVertices(a1, c1, g1); + graph.addEdges(a1, c1, g1); + expected.addVertex(acg); + + for ( final SeqVertex v : makeVertices(nIncoming) ) { + final BaseEdge e = new BaseEdge(false, edgeWeight++); + graph.addVertices(v); + graph.addEdge(v, a1, e); + expected.addVertex(v); + expected.addEdge(v, acg, e); + } + + for ( final SeqVertex v : makeVertices(nOutgoing) ) { + final BaseEdge e = new BaseEdge(false, edgeWeight++); + graph.addVertices(v); + graph.addEdge(g1, v, e); + expected.addVertex(v); + expected.addEdge(acg, v, e); + } + + tests.add(new Object[]{graph, expected}); + } + } + } + + return tests.toArray(new Object[][]{}); + } + + private List makeVertices(final int n) { + final List vs = new LinkedList(); + final List bases = Arrays.asList("A", "C", "G", "T", "TT", "GG", "CC", "AA"); + + for ( int i = 0; i < n; i++ ) + vs.add(new SeqVertex(bases.get(i % bases.size()))); + return vs; + } + + @Test(dataProvider = "LinearZipData", enabled = true) + public void testLinearZip(final SeqGraph graph, final SeqGraph expected) { + final SeqGraph merged = (SeqGraph)graph.clone(); + merged.zipLinearChains(); + try { + Assert.assertTrue(SeqGraph.graphEquals(merged, expected)); + } catch (AssertionError e) { + if ( ! 
SeqGraph.graphEquals(merged, expected) ) { + graph.printGraph(new File("graph.dot"), 0); + merged.printGraph(new File("merged.dot"), 0); + expected.printGraph(new File("expected.dot"), 0); + } + throw e; + } + } } From e9169987843d69fbca6b986f0d342fdb654f43a1 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 3 Apr 2013 10:39:45 -0400 Subject: [PATCH 10/23] Bugfix for head and tail merging code in SeqGraph -- The previous version of the head merging (and tail merging to a lesser degree) would inappropriately merge source and sinks without sufficient evidence to do so. This would introduce large deletion events at the start / end of the assemblies. Refactored code to require 20 bp of overlap in the head or tail nodes, as well as unit tested functions to support this. --- .../haplotypecaller/graphs/SeqGraph.java | 33 ++++++++++---- .../graphs/SharedVertexSequenceSplitter.java | 43 ++++++++++++++++--- .../SharedVertexSequenceSplitterUnitTest.java | 41 ++++++++++++++++++ 3 files changed, 102 insertions(+), 15 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index d08c2f211..4cc7aae2a 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -63,7 +63,14 @@ import java.util.Set; */ public final class SeqGraph extends BaseGraph { private final static boolean PRINT_SIMPLIFY_GRAPHS = false; - private final static int MIN_SUFFIX_TO_MERGE_TAILS = 5; + + /** + * The minimum number of common bp from the prefix (head merging) or suffix (tail merging) + * required before we'll merge in such configurations.
A large value here is critical to avoid + * merging inappropriate head or tail nodes, which introduces large insertion / deletion events + * as the merge operation creates a link among the non-linked sink / source vertices + */ + private final static int MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES = 10; /** * Construct an empty SeqGraph @@ -103,15 +110,15 @@ public final class SeqGraph extends BaseGraph { //logger.info("simplifyGraph iteration " + i); // iterate until we haven't don't anything useful didSomeWork = false; - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".dot"), 0); + if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".1.dot"), 0); didSomeWork |= new MergeDiamonds().transformUntilComplete(); didSomeWork |= new MergeTails().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".diamonds_and_tails.dot"), 0); + if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".2.diamonds_and_tails.dot"), 0); didSomeWork |= new SplitCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".split_suffix.dot"), 0); + if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".3.split_suffix.dot"), 0); didSomeWork |= new MergeCommonSuffices().transformUntilComplete(); - if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." + i + ".merge_suffix.dot"), 0); + if ( PRINT_SIMPLIFY_GRAPHS ) printGraph(new File("simplifyGraph." 
+ i + ".4.merge_suffix.dot"), 0); didSomeWork |= new MergeHeadlessIncomingSources().transformUntilComplete(); didSomeWork |= zipLinearChains(); @@ -375,7 +382,10 @@ public final class SeqGraph extends BaseGraph { // actually do the merging, returning true if at least 1 base was successfully split final SharedVertexSequenceSplitter splitter = new SharedVertexSequenceSplitter(SeqGraph.this, middles); - return splitter.splitAndUpdate(top, bottom, 1); + if (splitter.meetsMinMergableSequenceForEitherPrefixOrSuffix(1)) + return splitter.splitAndUpdate(top, bottom); + else + return false; } } @@ -408,7 +418,11 @@ public final class SeqGraph extends BaseGraph { if ( dontModifyGraphEvenIfPossible() ) return true; final SharedVertexSequenceSplitter splitter = new SharedVertexSequenceSplitter(SeqGraph.this, tails); - return splitter.splitAndUpdate(top, null, MIN_SUFFIX_TO_MERGE_TAILS); + + if (splitter.meetsMinMergableSequenceForSuffix(MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES)) + return splitter.splitAndUpdate(top, null); + else + return false; } } @@ -492,7 +506,10 @@ public final class SeqGraph extends BaseGraph { if ( dontModifyGraphEvenIfPossible() ) return true; final SharedVertexSequenceSplitter splitter = new SharedVertexSequenceSplitter(SeqGraph.this, incoming); - return splitter.splitAndUpdate(null, bottom, 1); + if (splitter.meetsMinMergableSequenceForPrefix(MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES)) + return splitter.splitAndUpdate(null, bottom); + else + return false; } } } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java index 9834653a6..ca7faa444 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java @@ -133,6 +133,14 @@ public class SharedVertexSequenceSplitter { suffixV = prefixAndSuffix.getSecond(); } + /** + * Given sequencing that are all equal, does this splitter make those into prefix or suffix nodes? + * @return true if we merge equal nodes into prefix nodes or suffix nodes + */ + protected static boolean prefersPrefixMerging() { + return true; + } + /** * Simple single-function interface to split and then update a graph * @@ -140,20 +148,41 @@ public class SharedVertexSequenceSplitter { * * @param top the top vertex, may be null * @param bottom the bottom vertex, may be null - * @param minCommonSequence the minimum prefix or suffix size necessary among the vertices to split up - * before we'll go ahead and actually do the splitting. Allows one to determine - * whether there's actually any useful splitting to do, as well as protect - * yourself against spurious splitting of nodes based on trivial amounts of overall * @return true if some useful splitting was done, false otherwise */ - public boolean splitAndUpdate(final SeqVertex top, final SeqVertex bottom, final int minCommonSequence) { - if ( prefixV.length() < minCommonSequence && suffixV.length() < minCommonSequence ) - return false; + public boolean splitAndUpdate(final SeqVertex top, final SeqVertex bottom) { split(); updateGraph(top, bottom); return true; } + /** + * Does either the common suffix or prefix have at least minCommonSequence bases in it? 
+ * @param minCommonSequence a minimum length of the common sequence, must be >= 0 + * @return true if either suffix or prefix length >= minCommonSequence + */ + public boolean meetsMinMergableSequenceForEitherPrefixOrSuffix(final int minCommonSequence) { + return meetsMinMergableSequenceForPrefix(minCommonSequence) || meetsMinMergableSequenceForSuffix(minCommonSequence); + } + + /** + * Does the common prefix have at least minCommonSequence bases in it? + * @param minCommonSequence a minimum length of the common sequence, must be >= 0 + * @return true if prefix length >= minCommonSequence + */ + public boolean meetsMinMergableSequenceForPrefix(final int minCommonSequence) { + return prefixV.length() >= minCommonSequence; + } + + /** + * Does the common suffix have at least minCommonSequence bases in it? + * @param minCommonSequence a minimum length of the common sequence, must be >= 0 + * @return true if suffix length >= minCommonSequence + */ + public boolean meetsMinMergableSequenceForSuffix(final int minCommonSequence) { + return suffixV.length() >= minCommonSequence; + } + /** * Actually do the splitting up of the vertices * diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java index 77857c367..0930d497f 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java @@ -250,4 +250,45 @@ public class SharedVertexSequenceSplitterUnitTest extends BaseTest { } } } + + @DataProvider(name = "MeetsMinSequenceData") + public Object[][] makeMeetsMinSequenceData() { + List tests = new ArrayList(); + + final boolean prefixBiased = 
SharedVertexSequenceSplitter.prefersPrefixMerging(); + tests.add(new Object[]{Arrays.asList("AC", "AC"), 0, true, true}); + tests.add(new Object[]{Arrays.asList("AC", "AC"), 1, prefixBiased, ! prefixBiased}); + tests.add(new Object[]{Arrays.asList("AC", "AC"), 2, prefixBiased, ! prefixBiased}); + tests.add(new Object[]{Arrays.asList("AC", "AC"), 3, false, false}); + tests.add(new Object[]{Arrays.asList("A", "AC"), 1, true, false}); + tests.add(new Object[]{Arrays.asList("A", "AC"), 2, false, false}); + tests.add(new Object[]{Arrays.asList("AT", "AC"), 1, true, false}); + tests.add(new Object[]{Arrays.asList("AAT", "AAC"), 1, true, false}); + tests.add(new Object[]{Arrays.asList("AAT", "AAC"), 2, true, false}); + tests.add(new Object[]{Arrays.asList("AAT", "AAC"), 3, false, false}); + tests.add(new Object[]{Arrays.asList("AATCCC", "AACCCC"), 1, true, true}); + tests.add(new Object[]{Arrays.asList("AATCCC", "AACCCC"), 2, true, true}); + tests.add(new Object[]{Arrays.asList("AATCCC", "AACCCC"), 3, false, true}); + tests.add(new Object[]{Arrays.asList("AATCCC", "AACCCC"), 4, false, false}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "MeetsMinSequenceData") + public void testSplitterCompleteCycle(final List mids, final int minSeqLength, final boolean prefixMeets, final boolean suffixMeets) { + final SeqGraph graph = new SeqGraph(); + + final SeqVertex top = new SeqVertex("AAAAAAAA"); + final SeqVertex bot = new SeqVertex("GGGGGGGG"); + final List v = new ArrayList(); + for ( final String s : mids ) { v.add(new SeqVertex(s)); } + graph.addVertices(v.toArray(new SeqVertex[]{})); + graph.addVertices(top, bot); + for ( final SeqVertex vi : v ) { graph.addEdge(top, vi); graph.addEdge(vi, bot); } + + final SharedVertexSequenceSplitter splitter = new SharedVertexSequenceSplitter(graph, v); + Assert.assertEquals(splitter.meetsMinMergableSequenceForPrefix(minSeqLength), prefixMeets, "Prefix failed"); + 
Assert.assertEquals(splitter.meetsMinMergableSequenceForSuffix(minSeqLength), suffixMeets, "Suffix failed"); + Assert.assertEquals(splitter.meetsMinMergableSequenceForEitherPrefixOrSuffix(minSeqLength), suffixMeets || prefixMeets, "Either prefix or suffix failed"); + } } From 4d389a823467e355d502ee77056ed4434a04e6e3 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Sun, 31 Mar 2013 16:57:36 -0400 Subject: [PATCH 11/23] Optimizations for HC infrastructure -- outgoingVerticesOf and incomingVerticesOf return a list not a set now, as the corresponding values must be unique since our super directed graph doesn't allow multiple edges between vertices -- Make DeBruijnGraph, SeqGraph, SeqVertex, and DeBruijnVertex all final -- Cache HashCode calculation in BaseVertex -- Better docs before the pruneGraph call --- .../gatk/walkers/haplotypecaller/DeBruijnAssembler.java | 8 ++++++++ .../gatk/walkers/haplotypecaller/graphs/BaseVertex.java | 7 +++++-- .../walkers/haplotypecaller/graphs/DeBruijnGraph.java | 2 +- .../walkers/haplotypecaller/graphs/DeBruijnVertex.java | 2 +- .../gatk/walkers/haplotypecaller/graphs/SeqVertex.java | 2 +- .../haplotypecaller/graphs/SharedSequenceMerger.java | 2 +- .../walkers/haplotypecaller/graphs/BaseGraphUnitTest.java | 6 ++++-- .../walkers/haplotypecaller/graphs/SeqGraphUnitTest.java | 2 +- 8 files changed, 22 insertions(+), 9 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 1fd2b9c00..5d8113212 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -185,6 +185,14 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { final SeqGraph seqGraph = deBruijnGraph.convertToSequenceGraph(); if ( 
debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.1.dot"), pruneFactor); + // TODO -- we need to come up with a consistent pruning algorithm. The current pruning algorithm + // TODO -- works well but it doesn't differentiate between an isolated chain that doesn't connect + // TODO -- to anything from one that actually has good support along the chain but just happens + // TODO -- to have a connection in the middle that has weight of < pruneFactor. Ultimately + // TODO -- the pruning algorithm really should be an error correction algorithm that knows more + // TODO -- about the structure of the data and can differentiate between an infrequent path but + // TODO -- without evidence against it (such as occurs when a region is hard to get any reads through) + // TODO -- from an error with lots of weight going along another similar path // the very first thing we need to do is zip up the graph, or pruneGraph will be too aggressive seqGraph.zipLinearChains(); if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.2.zipped.dot"), pruneFactor); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java index f50b4a155..65643a2cc 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java @@ -58,6 +58,7 @@ import java.util.Arrays; */ public class BaseVertex { final byte[] sequence; + int cachedHashCode = -1; /** * Create a new sequence vertex with sequence @@ -128,8 +129,10 @@ public class BaseVertex { */ @Override public int hashCode() { - // TODO -- optimization, could compute upfront once and cached in debruijn graph - return Arrays.hashCode(sequence); + if ( cachedHashCode == -1 ) { + cachedHashCode = Arrays.hashCode(sequence); + }
return cachedHashCode; } @Override diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java index 109598029..66085fcad 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java @@ -59,7 +59,7 @@ import java.util.Map; * User: rpoplin * Date: 2/6/13 */ -public class DeBruijnGraph extends BaseGraph { +public final class DeBruijnGraph extends BaseGraph { /** * Create an empty DeBruijnGraph with default kmer size */ diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java index 4d9441efe..c240949d9 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnVertex.java @@ -54,7 +54,7 @@ import com.google.java.contract.Ensures; * User: ebanks, mdepristo * Date: Mar 23, 2011 */ -public class DeBruijnVertex extends BaseVertex { +public final class DeBruijnVertex extends BaseVertex { private final static byte[][] sufficesAsByteArray = new byte[256][]; static { for ( int i = 0; i < sufficesAsByteArray.length; i++ ) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java index cfc2abfdc..f192b54aa 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqVertex.java @@ -70,7 +70,7 @@ 
import java.util.Arrays; * @author: depristo * @since 03/2013 */ -public class SeqVertex extends BaseVertex { +public final class SeqVertex extends BaseVertex { private static int idCounter = 0; public final int id; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java index 1c53f2332..28734e505 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java @@ -75,7 +75,7 @@ public class SharedSequenceMerger { if ( graph == null ) throw new IllegalArgumentException("graph cannot be null"); if ( ! graph.vertexSet().contains(v) ) throw new IllegalArgumentException("graph doesn't contain vertex " + v); - final Set prevs = graph.incomingVerticesOf(v); + final List prevs = graph.incomingVerticesOf(v); if ( ! canMerge(graph, v, prevs) ) return false; else { diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java index 9737f72f5..c829488ba 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraphUnitTest.java @@ -241,9 +241,11 @@ public class BaseGraphUnitTest extends BaseTest { graph.printGraph(tmp, 10); } - private void assertVertexSetEquals(final Set actual, final SeqVertex ... expected) { + private void assertVertexSetEquals(final Collection actual, final SeqVertex ... 
expected) { + final Set actualSet = new HashSet(actual); + Assert.assertEquals(actualSet.size(), actual.size(), "Duplicate elements found in vertex list"); final Set expectedSet = expected == null ? Collections.emptySet() : new HashSet(Arrays.asList(expected)); - Assert.assertEquals(actual, expectedSet); + Assert.assertEquals(actualSet, expectedSet); } @Test(enabled = true) diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java index 698b83199..ca43ced69 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java @@ -58,7 +58,7 @@ import java.util.LinkedList; import java.util.List; public class SeqGraphUnitTest extends BaseTest { - private final static boolean DEBUG = true; + private final static boolean DEBUG = false; private class MergeNodesWithNoVariationTestProvider extends TestDataProvider { public byte[] sequence; From af593094a2ddcb598b00e649d90aa95a4c500df5 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Sat, 30 Mar 2013 14:22:45 -0400 Subject: [PATCH 12/23] Major improvements to HC that trims down active regions before genotyping -- Trims down active regions and associated reads and haplotypes to a smaller interval based on the events actually in the haplotypes within the original active region (without extension). Radically speeds up calculations when using large active region extensions. The ActiveRegion.trim algorithm does the best job it can of trimming an active region down to a requested interval while ensuring the resulting active region has a region (and extension) no bigger than the original while spanning as much of the requested extend as possible. 
The trimming results in an active region that is a subset of the previous active region based on the position and types of variants found among the haplotypes -- Retire error corrector, archive old code and repurpose subsystem into a general kmer counter. The previous error corrector was just broken (conceptually) and was disabled by default in the engine. Now turning on error correction throws a UserException. Old part of the error corrector that counts kmers was extracted and put into KMerCounter.java -- Add final simplify graph call after we prune away the non-reference paths in DeBruijnAssembler --- .../haplotypecaller/DeBruijnAssembler.java | 52 ++--- .../haplotypecaller/GenotypingEngine.java | 2 +- .../haplotypecaller/HaplotypeCaller.java | 217 +++++++++++++++--- ...erErrorCorrector.java => KMerCounter.java} | 215 ++--------------- .../haplotypecaller/graphs/DeBruijnGraph.java | 1 - .../haplotypecaller/KMerCounterUnitTest.java | 84 +++++++ .../KMerErrorCorrectorUnitTest.java | 66 ------ .../utils/activeregion/ActiveRegion.java | 61 ++++- .../sting/utils/haplotype/EventMap.java | 23 ++ .../sting/utils/haplotype/Haplotype.java | 46 ++++ .../sting/utils/sam/AlignmentUtils.java | 64 ++++-- .../activeregion/ActiveRegionUnitTest.java | 73 +++++- .../utils/haplotype/HaplotypeUnitTest.java | 71 +++++- .../utils/sam/AlignmentUtilsUnitTest.java | 85 +++++++ 14 files changed, 703 insertions(+), 357 deletions(-) rename protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/{KMerErrorCorrector.java => KMerCounter.java} (50%) create mode 100644 protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterUnitTest.java delete mode 100644 protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 5d8113212..40a6a79e0 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -55,6 +55,7 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.*; import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.MathUtils; import org.broadinstitute.sting.utils.SWPairwiseAlignment; @@ -161,8 +162,9 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { if ( debugGraphTransformations ) graph.printGraph(new File("unpruned.dot"), pruneFactor); if ( shouldErrorCorrectKmers() ) { - graph = errorCorrect(graph); - if ( debugGraphTransformations ) graph.printGraph(new File("errorCorrected.dot"), pruneFactor); + throw new UserException("Error correction no longer supported because of the " + + "incredibly naive way this was implemented. The command line argument remains because some" + + " future subsystem will actually go and error correct the reads"); } final SeqGraph seqGraph = toSeqGraph(graph); @@ -214,6 +216,16 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { return null; seqGraph.removePathsNotConnectedToRef(); + seqGraph.simplifyGraph(); + if ( seqGraph.vertexSet().size() == 1 ) { + // we've prefectly assembled into a single reference haplotype, add a empty seq vertex to stop + // the code from blowing up. 
+ // TODO -- ref properties should really be on the vertices, not the graph itself + final SeqVertex complete = seqGraph.vertexSet().iterator().next(); + final SeqVertex dummy = new SeqVertex(""); + seqGraph.addVertex(dummy); + seqGraph.addEdge(complete, dummy, new BaseEdge(true, 0)); + } if ( debugGraphTransformations ) seqGraph.printGraph(new File("sequenceGraph.5.final.dot"), pruneFactor); return seqGraph; @@ -332,39 +344,6 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { return true; } - /** - * Error correct the kmers in this graph, returning a new graph built from those error corrected kmers - * @return an error corrected version of this (freshly allocated graph) or simply this graph if for some reason - * we cannot actually do the error correction - */ - public DeBruijnGraph errorCorrect(final DeBruijnGraph graph) { - final KMerErrorCorrector corrector = new KMerErrorCorrector(graph.getKmerSize(), 1, 1, 5); // TODO -- should be static variables - - for( final BaseEdge e : graph.edgeSet() ) { - for ( final byte[] kmer : Arrays.asList(graph.getEdgeSource(e).getSequence(), graph.getEdgeTarget(e).getSequence())) { - // TODO -- need a cleaner way to deal with the ref weight - corrector.addKmer(kmer, e.isRef() ? 
1000 : e.getMultiplicity()); - } - } - - if ( corrector.computeErrorCorrectionMap() ) { - final DeBruijnGraph correctedGraph = new DeBruijnGraph(graph.getKmerSize()); - - for( final BaseEdge e : graph.edgeSet() ) { - final byte[] source = corrector.getErrorCorrectedKmer(graph.getEdgeSource(e).getSequence()); - final byte[] target = corrector.getErrorCorrectedKmer(graph.getEdgeTarget(e).getSequence()); - if ( source != null && target != null ) { - correctedGraph.addKmersToGraph(source, target, e.isRef(), e.getMultiplicity()); - } - } - - return correctedGraph; - } else { - // the error correction wasn't possible, simply return this graph - return graph; - } - } - protected void printGraphs(final List graphs) { final int writeFirstGraphWithSizeSmallerThan = 50; @@ -461,6 +440,9 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } } + // add genome locs to the haplotypes + for ( final Haplotype h : returnHaplotypes ) h.setGenomeLocation(activeRegionWindow); + if ( returnHaplotypes.size() < returnHaplotypes.size() ) logger.info("Found " + returnHaplotypes.size() + " candidate haplotypes of " + returnHaplotypes.size() + " possible combinations to evaluate every read against at " + refLoc); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index 7cdc57464..abd502c2b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -153,7 +153,7 @@ public class GenotypingEngine { if (haplotypes == null || haplotypes.isEmpty()) throw new IllegalArgumentException("haplotypes input should be non-empty and non-null, got "+haplotypes); if (haplotypeReadMap == null || haplotypeReadMap.isEmpty()) throw new IllegalArgumentException("haplotypeReadMap input should be 
non-empty and non-null, got "+haplotypeReadMap); if (ref == null || ref.length == 0 ) throw new IllegalArgumentException("ref bytes input should be non-empty and non-null, got "+ref); - if (refLoc == null || refLoc.getStop()-refLoc.getStart()+1 != ref.length) throw new IllegalArgumentException(" refLoc must be non-null and length must match ref bytes, got "+refLoc); + if (refLoc == null || refLoc.size() != ref.length) throw new IllegalArgumentException(" refLoc must be non-null and length must match ref bytes, got "+refLoc); if (activeRegionWindow == null ) throw new IllegalArgumentException("activeRegionWindow must be non-null, got "+activeRegionWindow); if (activeAllelesToGenotype == null ) throw new IllegalArgumentException("activeAllelesToGenotype must be non-null, got "+activeAllelesToGenotype); if (genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser must be non-null, got "+genomeLocParser); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 53fffec61..bce179ee1 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -77,6 +77,7 @@ import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.fragments.FragmentCollection; import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.sting.utils.haplotype.EventMap; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.sting.utils.haplotype.HaplotypeBaseComparator; import org.broadinstitute.sting.utils.haplotype.LDMerger; @@ -300,6 +301,7 @@ public class HaplotypeCaller extends ActiveRegionWalker 
implem @Argument(fullName="debugGraphTransformations", shortName="debugGraphTransformations", doc="If specified, we will write DOT formatted graph files out of the assembler for only this graph size", required = false) protected int debugGraphTransformations = -1; + // TODO -- not currently useful @Hidden @Argument(fullName="useLowQualityBasesForAssembly", shortName="useLowQualityBasesForAssembly", doc="If specified, we will include low quality bases when doing the assembly", required = false) protected boolean useLowQualityBasesForAssembly = false; @@ -308,6 +310,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="useNewLDMerger", shortName="useNewLDMerger", doc="If specified, we will include low quality bases when doing the assembly", required = false) protected boolean useNewLDMerger = false; + @Hidden + @Argument(fullName="trimActiveRegions", shortName="trimActiveRegions", doc="If specified, we will trim down the active region from the full region (active + extension) to just the active interval for genotyping", required = false) + protected boolean trimActiveRegions = false; + // the UG engines private UnifiedGenotyperEngine UG_engine = null; private UnifiedGenotyperEngine UG_engine_simple_genotyper = null; @@ -329,6 +335,13 @@ public class HaplotypeCaller extends ActiveRegionWalker implem // reference base padding size private static final int REFERENCE_PADDING = 500; + // include at least this many bases around an event for calling it + private final static int PADDING_AROUND_SNPS_FOR_CALLING = 20; + private final static int PADDING_AROUND_OTHERS_FOR_CALLING = 150; + + // the maximum extent into the full active region extension that we're willing to go in genotyping our events + private final static int MAX_GENOTYPING_ACTIVE_REGION_EXTENSION = 25; + private final static int maxReadsInRegionPerSample = 1000; // TODO -- should be an argument private final static int minReadsPerAlignmentStart = 5; // TODO -- should be an 
argument @@ -490,7 +503,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem final byte qual = p.getQual(); if( p.isDeletion() || qual > (byte) 18) { int AA = 0; final int AB = 1; int BB = 2; - if( p.getBase() != ref.getBase() || p.isDeletion() || p.isBeforeDeletionStart() || p.isAfterDeletionEnd() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip() ) { + if( p.getBase() != ref.getBase() || p.isDeletion() || p.isBeforeDeletionStart() || p.isAfterDeletionEnd() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip() ) { AA = 2; BB = 0; if( p.isNextToSoftClip() ) { @@ -521,58 +534,53 @@ public class HaplotypeCaller extends ActiveRegionWalker implem //--------------------------------------------------------------------------------------------------------------- @Override - public Integer map( final ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker ) { + public Integer map( final ActiveRegion originalActiveRegion, final RefMetaDataTracker metaDataTracker ) { if ( justDetermineActiveRegions ) // we're benchmarking ART and/or the active region determination code in the HC, just leave without doing any work return 1; - final List activeAllelesToGenotype = new ArrayList(); + if( !originalActiveRegion.isActive() ) { return 0; } // Not active so nothing to do! + final List activeAllelesToGenotype = new ArrayList(); if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { for( final VariantContext vc : allelesToGenotype ) { - if( activeRegion.getLocation().overlapsP( getToolkit().getGenomeLocParser().createGenomeLoc(vc) ) ) { + if( originalActiveRegion.getLocation().overlapsP( getToolkit().getGenomeLocParser().createGenomeLoc(vc) ) ) { activeAllelesToGenotype.add(vc); // do something with these VCs during GGA mode } } allelesToGenotype.removeAll( activeAllelesToGenotype ); + // No alleles found in this region so nothing to do! 
+ if ( activeAllelesToGenotype.isEmpty() ) { return 0; } + } else { + if( originalActiveRegion.size() == 0 ) { return 0; } // No reads here so nothing to do! } - if( !activeRegion.isActive() ) { return 0; } // Not active so nothing to do! - if( activeRegion.size() == 0 && UG_engine.getUAC().GenotypingMode != GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES ) { return 0; } // No reads here so nothing to do! - if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES && activeAllelesToGenotype.isEmpty() ) { return 0; } // No alleles found in this region so nothing to do! + // run the local assembler, getting back a collection of information on how we should proceed + final AssemblyResult assemblyResult = assembleReads(originalActiveRegion, activeAllelesToGenotype); - finalizeActiveRegion(activeRegion); // merge overlapping fragments, clip adapter and low qual tails - - final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); // Create the reference haplotype which is the bases from the reference that make up the active region - final byte[] fullReferenceWithPadding = activeRegion.getActiveRegionReference(referenceReader, REFERENCE_PADDING); - final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); - - final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); - if( haplotypes.size() == 1 ) { return 1; } // only the reference haplotype remains so nothing else to do! - - final List filteredReads = filterNonPassingReads( activeRegion ); // filter out reads from genotyping which fail mapping quality based criteria - if( activeRegion.size() == 0 ) { return 1; } // no reads remain after filtering so nothing else to do! 
- - // sort haplotypes to take full advantage of haplotype start offset optimizations in PairHMM - Collections.sort( haplotypes, new HaplotypeBaseComparator() ); - - if (dontGenotype) - return 1; + // abort early if something is out of the acceptable range + if( assemblyResult.haplotypes.size() == 1 ) { return 1; } // only the reference haplotype remains so nothing else to do! + if( assemblyResult.regionForGenotyping.size() == 0 ) { return 1; } // no reads remain after filtering so nothing else to do! + if (dontGenotype) return 1; // user requested we not proceed // evaluate each sample's reads against all haplotypes - final Map stratifiedReadMap = likelihoodCalculationEngine.computeReadLikelihoods( haplotypes, splitReadsBySample( activeRegion.getReads() ) ); + //logger.info("Computing read likelihoods with " + assemblyResult.regionForGenotyping.size() + " reads"); + final Map stratifiedReadMap = likelihoodCalculationEngine.computeReadLikelihoods( assemblyResult.haplotypes, splitReadsBySample( assemblyResult.regionForGenotyping.getReads() ) ); + + // filter out reads from genotyping which fail mapping quality based criteria + final List filteredReads = filterNonPassingReads( assemblyResult.regionForGenotyping ); final Map> perSampleFilteredReadList = splitReadsBySample( filteredReads ); // subset down to only the best haplotypes to be genotyped in all samples ( in GGA mode use all discovered haplotypes ) - final List bestHaplotypes = selectBestHaplotypesForGenotyping(haplotypes, stratifiedReadMap); + final List bestHaplotypes = selectBestHaplotypesForGenotyping(assemblyResult.haplotypes, stratifiedReadMap); final GenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine.assignGenotypeLikelihoods( UG_engine, bestHaplotypes, stratifiedReadMap, perSampleFilteredReadList, - fullReferenceWithPadding, - paddedReferenceLoc, - activeRegion.getLocation(), + assemblyResult.fullReferenceWithPadding, + assemblyResult.paddedReferenceLoc, + 
assemblyResult.regionForGenotyping.getLocation(), getToolkit().getGenomeLocParser(), activeAllelesToGenotype ); @@ -583,7 +591,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem } if ( bamWriter != null ) { - haplotypeBAMWriter.writeReadsAlignedToHaplotypes(haplotypes, paddedReferenceLoc, bestHaplotypes, calledHaplotypes.getCalledHaplotypes(), stratifiedReadMap); + haplotypeBAMWriter.writeReadsAlignedToHaplotypes(assemblyResult.haplotypes, assemblyResult.paddedReferenceLoc, + bestHaplotypes, + calledHaplotypes.getCalledHaplotypes(), + stratifiedReadMap); } if( DEBUG ) { logger.info("----------------------------------------------------------------------------------"); } @@ -591,6 +602,152 @@ public class HaplotypeCaller extends ActiveRegionWalker implem return 1; // One active region was processed during this map call } + private final static class AssemblyResult { + final List haplotypes; + final ActiveRegion regionForGenotyping; + final byte[] fullReferenceWithPadding; + final GenomeLoc paddedReferenceLoc; + + private AssemblyResult(List haplotypes, ActiveRegion regionForGenotyping, byte[] fullReferenceWithPadding, GenomeLoc paddedReferenceLoc) { + this.haplotypes = haplotypes; + this.regionForGenotyping = regionForGenotyping; + this.fullReferenceWithPadding = fullReferenceWithPadding; + this.paddedReferenceLoc = paddedReferenceLoc; + } + } + + /** + * High-level function that runs the assembler on the active region reads, + * returning a data structure with the resulting information needed + * for further HC steps + * + * @param activeRegion the region we should assemble + * @param activeAllelesToGenotype additional alleles we might need to genotype (can be empty) + * @return the AssemblyResult describing how to proceed with genotyping + */ + protected AssemblyResult assembleReads(final ActiveRegion activeRegion, final List activeAllelesToGenotype) { + // Create the reference haplotype which is the bases from the reference that make up the 
active region + finalizeActiveRegion(activeRegion); // merge overlapping fragments, clip adapter and low qual tails + + final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); + final byte[] fullReferenceWithPadding = activeRegion.getActiveRegionReference(referenceReader, REFERENCE_PADDING); + final GenomeLoc paddedReferenceLoc = getPaddedLoc(activeRegion); + + final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); + + if ( trimActiveRegions ) { + return trimActiveRegion(activeRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); + } else { + // we don't want to or cannot create a trimmed active region, so go ahead and use the old one + return new AssemblyResult(haplotypes, activeRegion, fullReferenceWithPadding, paddedReferenceLoc); + } + } + + /** + * Trim down the active region to just enough to properly genotype the events among the haplotypes + * + * This function merely creates the region, but it doesn't populate the reads back into the region + * + * @param region our full active region + * @param haplotypes the list of haplotypes we've created from assembly + * @param ref the reference bases over the full padded location + * @param refLoc the span of the reference bases + * @return a new ActiveRegion trimmed down to just what's needed for genotyping, or null if we couldn't do this successfully + */ + private ActiveRegion createTrimmedRegion(final ActiveRegion region, final List haplotypes, final byte[] ref, final GenomeLoc refLoc) { + EventMap.buildEventMapsForHaplotypes(haplotypes, ref, refLoc, DEBUG); + final TreeSet allContexts = EventMap.getAllVariantContexts(haplotypes); + final GenomeLocParser parser = getToolkit().getGenomeLocParser(); + + if ( allContexts.isEmpty() ) // no variants, so just return the current region + return null; + + final List withinActiveRegion = new 
LinkedList(); + int pad = PADDING_AROUND_SNPS_FOR_CALLING; + GenomeLoc trimLoc = null; + for ( final VariantContext vc : allContexts ) { + final GenomeLoc vcLoc = parser.createGenomeLoc(vc); + if ( region.getLocation().overlapsP(vcLoc) ) { + if ( ! vc.isSNP() ) // if anything isn't a SNP use the bigger padding + pad = PADDING_AROUND_OTHERS_FOR_CALLING; + trimLoc = trimLoc == null ? vcLoc : trimLoc.endpointSpan(vcLoc); + withinActiveRegion.add(vc); + } + } + + // we don't actually have anything in the region after removing variants that don't overlap the region's full location + if ( trimLoc == null ) return null; + + final GenomeLoc maxSpan = getToolkit().getGenomeLocParser().createPaddedGenomeLoc(region.getLocation(), MAX_GENOTYPING_ACTIVE_REGION_EXTENSION); + final GenomeLoc idealSpan = getToolkit().getGenomeLocParser().createPaddedGenomeLoc(trimLoc, pad); + final GenomeLoc finalSpan = maxSpan.intersect(idealSpan); + + final ActiveRegion trimmedRegion = region.trim(finalSpan); + if ( DEBUG ) { + logger.info("events : " + withinActiveRegion); + logger.info("trimLoc : " + trimLoc); + logger.info("pad : " + pad); + logger.info("idealSpan : " + idealSpan); + logger.info("maxSpan : " + maxSpan); + logger.info("finalSpan : " + finalSpan); + logger.info("regionSpan : " + trimmedRegion.getExtendedLoc() + " size is " + trimmedRegion.getExtendedLoc().size()); + } + return trimmedRegion; + } + + /** + * Trim down the active region to just enough to properly genotype the events among the haplotypes + * + * @param originalActiveRegion our full active region + * @param haplotypes the list of haplotypes we've created from assembly + * @param fullReferenceWithPadding the reference bases over the full padded location + * @param paddedReferenceLoc the span of the reference bases + * @return an AssemblyResult containing the trimmed active region with all of the reads we should use + * trimmed down as well, and a revised set of haplotypes. 
If trimming failed this function + * may choose to use the originalActiveRegion without modification + */ + private AssemblyResult trimActiveRegion(final ActiveRegion originalActiveRegion, + final List haplotypes, + final byte[] fullReferenceWithPadding, + final GenomeLoc paddedReferenceLoc) { + final ActiveRegion trimmedActiveRegion = createTrimmedRegion(originalActiveRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); + + if ( trimmedActiveRegion == null ) + return new AssemblyResult(haplotypes, originalActiveRegion, fullReferenceWithPadding, paddedReferenceLoc); + + // trim down the haplotypes + final Set haplotypeSet = new HashSet(haplotypes.size()); + for ( final Haplotype h : haplotypes ) { + final Haplotype trimmed = h.trim(trimmedActiveRegion.getExtendedLoc()); + if ( trimmed != null ) { + haplotypeSet.add(trimmed); + } else if ( DEBUG ) { + logger.info("Throwing out haplotype " + h + " with cigar " + h.getCigar() + " because it starts with or ends with an insertion or deletion when trimmed to " + trimmedActiveRegion.getExtendedLoc()); + } + } + + // create the final list of trimmed haplotypes + final List trimmedHaplotypes = new ArrayList(haplotypeSet); + + // sort haplotypes to take full advantage of haplotype start offset optimizations in PairHMM + Collections.sort( trimmedHaplotypes, new HaplotypeBaseComparator() ); + + if ( DEBUG ) logger.info("Trimming haplotypes reduced number of haplotypes from " + haplotypes.size() + " to only " + trimmedHaplotypes.size()); + + // trim down the reads and add them to the trimmed active region + final List trimmedReads = new ArrayList(originalActiveRegion.getReads().size()); + for( final GATKSAMRecord read : originalActiveRegion.getReads() ) { + final GATKSAMRecord clippedRead = ReadClipper.hardClipToRegion( read, trimmedActiveRegion.getExtendedLoc().getStart(), trimmedActiveRegion.getExtendedLoc().getStop() ); + if( trimmedActiveRegion.readOverlapsRegion(clippedRead) && clippedRead.getReadLength() > 0 
) { + trimmedReads.add(clippedRead); + } + } + trimmedActiveRegion.clearReads(); + trimmedActiveRegion.addAll(ReadUtils.sortReadsByCoordinate(trimmedReads)); + + return new AssemblyResult(trimmedHaplotypes, trimmedActiveRegion, fullReferenceWithPadding, paddedReferenceLoc); + } + /** * Select the best N haplotypes according to their likelihoods, if appropriate * diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrector.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java similarity index 50% rename from protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrector.java rename to protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java index b051e5411..1f0903753 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrector.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounter.java @@ -51,110 +51,31 @@ import org.apache.log4j.Logger; import java.util.*; /** - * generic utility function that error corrects kmers based on counts - * - * This class provides a generic facility for remapping kmers (byte[] of constant size) - * that occur infrequently to those that occur frequently, based on their simple edit distance - * as measured by mismatches. - * - * The overall workflow of using this class is simple. First, you create the class with - * parameters determining how the error correction should proceed. Next, you provide all - * of the kmers you see in your data. Once all kmers have been added, you call computeErrorCorrectionMap - * to tell this class that all kmers have been added and its time to determine error correcting - * mapping from observed kmers to corrected kmers. 
This correction looks for low-count (as determined - * by maxCountToCorrect) kmers and chooses the best kmer (minimizing mismatches) among those - * with at least minCountOfKmerToBeCorrection occurrences to error correct the kmer to. If - * there is no kmer with less than maxMismatchesToCorrect then the kmer will be mapped to - * null, indicating the kmer should not be used. - * - * TODO -- for ease of implementation this class uses strings instead of byte[] as those cannot - * TODO -- be added to hashmaps (more specifically, those don't implement .equals). A more efficient - * TODO -- version would use the byte[] directly - * - * TODO -- this is just not the right way to implement error correction in the graph. Basically, the - * right way to think about this is error correcting reads: - * - * * - * ACTGAT - * ACT - * CTG - * TGA - * GAT - * - * Now suppose the G is an error. What you are doing is asking for each 3mer in the read whether it's high quality - * or not. Suppose the answer is - * - * * - * ACTGAT - * ACT -- yes - * CTG -- no [CTG is unusual] - * TGA -- no [TGA is unusual] - * GAT -- yes [maybe GAT is just common, even through its an error] - * - * As we do this process it's clear how we can figure out which positions in the read likely harbor errors, and - * then go search around those bases in the read in an attempt to fix the read. We don't have to compute for - * every bad kmer it's best match, as that's just not the problem we are thinking looking to solve. We are actually - * looking for a change to a read such that all spanning kmers are well-supported. This class is being disabled - * until we figure implement this change. + * generic utility class that counts kmers * + * Basically you add kmers to the counter, and it tells you how many occurrences of each kmer it's seen. 
* * User: depristo * Date: 3/8/13 * Time: 1:16 PM */ -public class KMerErrorCorrector { - private final static Logger logger = Logger.getLogger(KMerErrorCorrector.class); - - /** - * The maximum number of bad kmer -> good kmer correction operations we'll consider doing before - * aborting for efficiency reasons. Basically, the current algorithm sucks, and is O(n^2), and - * so we cannot simply error correct 10K bad kmers against a db of 100K kmers if we ever want - * to finish running in a reasonable amount of time. This isn't worth fixing because fundamentally - * the entire error correction algorithm is just not right (i.e., it's correct but not ideal conceptually - * so we'll just fix the conceptual problem than the performance issue). - */ - private final static int MAX_CORRECTION_OPS_TO_ALLOW = 5000 * 1000; +public class KMerCounter { + private final static Logger logger = Logger.getLogger(KMerCounter.class); /** * A map of for each kmer to its num occurrences in addKmers */ - Map countsByKMer = new HashMap(); + private final Map countsByKMer = new HashMap(); + private final int kmerLength; /** - * A map from raw kmer -> error corrected kmer - */ - Map rawToErrorCorrectedMap = null; - - final int kmerLength; - final int maxCountToCorrect; - final int maxMismatchesToCorrect; - final int minCountOfKmerToBeCorrection; - - /** - * Create a new kmer corrector + * Create a new kmer counter * * @param kmerLength the length of kmers we'll be counting to error correct, must be >= 1 - * @param maxCountToCorrect kmers with < maxCountToCorrect will try to be error corrected to another kmer, must be >= 0 - * @param maxMismatchesToCorrect the maximum number of mismatches between a to-be-corrected kmer and its - * best match that we attempt to error correct. If no sufficiently similar - * kmer exists, it will be remapped to null. Must be >= 1 - * @param minCountOfKmerToBeCorrection the minimum count of a kmer to be considered a target for correction. 
- * That is, kmers that need correction will only be matched with kmers - * with at least minCountOfKmerToBeCorrection occurrences. Must be >= 1 */ - public KMerErrorCorrector(final int kmerLength, - final int maxCountToCorrect, - final int maxMismatchesToCorrect, - final int minCountOfKmerToBeCorrection) { + public KMerCounter(final int kmerLength) { if ( kmerLength < 1 ) throw new IllegalArgumentException("kmerLength must be > 0 but got " + kmerLength); - if ( maxCountToCorrect < 0 ) throw new IllegalArgumentException("maxCountToCorrect must be >= 0 but got " + maxCountToCorrect); - if ( maxMismatchesToCorrect < 1 ) throw new IllegalArgumentException("maxMismatchesToCorrect must be >= 1 but got " + maxMismatchesToCorrect); - if ( minCountOfKmerToBeCorrection < 1 ) throw new IllegalArgumentException("minCountOfKmerToBeCorrection must be >= 1 but got " + minCountOfKmerToBeCorrection); - this.kmerLength = kmerLength; - this.maxCountToCorrect = maxCountToCorrect; - this.maxMismatchesToCorrect = maxMismatchesToCorrect; - this.minCountOfKmerToBeCorrection = minCountOfKmerToBeCorrection; + this.kmerLength = kmerLength; } /** @@ -165,7 +86,17 @@ public class KMerErrorCorrector { protected void addKmers(final String ... kmers) { for ( final String kmer : kmers ) addKmer(kmer, 1); - computeErrorCorrectionMap(); + } + + /** + * Get the count of kmer in this kmer counter + * @param kmer a non-null kmer to get + * @return a non-negative integer (0 if the kmer was never added) + */ + public int getKmerCount(final byte[] kmer) { + if ( kmer == null ) throw new IllegalArgumentException("kmer cannot be null"); + final CountedKmer counted = countsByKMer.get(new String(kmer)); + return counted == null ? 0 : counted.count; } /** @@ -178,68 +109,9 @@ public class KMerErrorCorrector { addKmer(new String(rawKmer), kmerCount); } - - /** - * Get the error corrected kmer for rawKmer - * - * @param rawKmer a kmer that was already added that we want to get an error corrected version for - * @return an error corrected kmer to use instead of rawKmer. 
May be == rawKmer if no error correction - * is not necessary. May be null, indicating the rawKmer shouldn't be used at all - */ - public byte[] getErrorCorrectedKmer(final byte[] rawKmer) { - final String result = getErrorCorrectedKmer(new String(rawKmer)); - return result == null ? null : result.getBytes(); - } - - /** - * Indicate that no more kmers will be added to the kmer error corrector, so that the - * error correction data structure should be computed from the added kmers. Enabled calls - * to getErrorCorrectedKmer, and disable calls to addKmer. - * - * @return true if the error correction map could actually be computed, false if for any reason - * (efficiency, memory, we're out to lunch) a correction map couldn't be created. - */ - public boolean computeErrorCorrectionMap() { - if ( countsByKMer == null ) - throw new IllegalStateException("computeErrorCorrectionMap can only be called once"); - - final LinkedList needsCorrection = new LinkedList(); - final List goodKmers = new ArrayList(countsByKMer.size()); - - rawToErrorCorrectedMap = new HashMap(countsByKMer.size()); - for ( final CountedKmer countedKmer: countsByKMer.values() ) { - if ( countedKmer.count <= maxCountToCorrect ) - needsCorrection.add(countedKmer); - else { - // todo -- optimization could make not in map mean == - rawToErrorCorrectedMap.put(countedKmer.kmer, countedKmer.kmer); - - // only allow corrections to kmers with at least this count - if ( countedKmer.count >= minCountOfKmerToBeCorrection ) - goodKmers.add(countedKmer); - } - } - - // cleanup memory -- we don't need the counts for each kmer any longer - countsByKMer = null; - - if ( goodKmers.size() * needsCorrection.size() > MAX_CORRECTION_OPS_TO_ALLOW ) - return false; - else { - Collections.sort(goodKmers); - for ( final CountedKmer toCorrect : needsCorrection ) { - final String corrected = findClosestKMer(toCorrect, goodKmers); - rawToErrorCorrectedMap.put(toCorrect.kmer, corrected); - } - - return true; - } - } - protected 
void addKmer(final String rawKmer, final int kmerCount) { if ( rawKmer.length() != kmerLength ) throw new IllegalArgumentException("bad kmer length " + rawKmer + " expected size " + kmerLength); if ( kmerCount < 0 ) throw new IllegalArgumentException("bad kmerCount " + kmerCount); - if ( countsByKMer == null ) throw new IllegalStateException("Cannot add kmers to an already finalized error corrector"); CountedKmer countFromMap = countsByKMer.get(rawKmer); if ( countFromMap == null ) { @@ -249,55 +121,10 @@ public class KMerErrorCorrector { countFromMap.count += kmerCount; } - protected String findClosestKMer(final CountedKmer kmer, final Collection goodKmers) { - String bestMatch = null; - int minMismatches = Integer.MAX_VALUE; - - for ( final CountedKmer goodKmer : goodKmers ) { - final int mismatches = countMismatches(kmer.kmer, goodKmer.kmer, minMismatches); - if ( mismatches < minMismatches ) { - minMismatches = mismatches; - bestMatch = goodKmer.kmer; - } - - // if we find an edit-distance 1 result, abort early, as we know there can be no edit distance 0 results - if ( mismatches == 1 ) - break; - } - - return minMismatches > maxMismatchesToCorrect ? null : bestMatch; - } - - protected int countMismatches(final String one, final String two, final int currentBest) { - int mismatches = 0; - for ( int i = 0; i < one.length(); i++ ) { - mismatches += one.charAt(i) == two.charAt(i) ? 
0 : 1; - if ( mismatches > currentBest ) - break; - if ( mismatches > maxMismatchesToCorrect ) - return Integer.MAX_VALUE; - } - return mismatches; - } - - protected String getErrorCorrectedKmer(final String rawKmer) { - if ( rawToErrorCorrectedMap == null ) throw new IllegalStateException("Cannot get error corrected kmers until after computeErrorCorrectionMap has been called"); - if ( rawKmer.length() != kmerLength ) throw new IllegalArgumentException("bad kmer length " + rawKmer + " expected size " + kmerLength); - return rawToErrorCorrectedMap.get(rawKmer); - } - @Override public String toString() { - final StringBuilder b = new StringBuilder("KMerErrorCorrector{"); - if ( rawToErrorCorrectedMap == null ) { - b.append("counting ").append(countsByKMer.size()).append(" distinct kmers"); - } else { - for ( Map.Entry toCorrect : rawToErrorCorrectedMap.entrySet() ) { - final boolean correcting = ! toCorrect.getKey().equals(toCorrect.getValue()); - if ( correcting ) - b.append(String.format("%n\tCorrecting %s -> %s", toCorrect.getKey(), toCorrect.getValue())); - } - } + final StringBuilder b = new StringBuilder("KMerCounter{"); + b.append("counting ").append(countsByKMer.size()).append(" distinct kmers"); b.append("\n}"); return b.toString(); } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java index 66085fcad..c11841dac 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/DeBruijnGraph.java @@ -47,7 +47,6 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Ensures; -import org.broadinstitute.sting.gatk.walkers.haplotypecaller.KMerErrorCorrector; import java.util.Arrays; import java.util.HashMap; diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterUnitTest.java new file mode 100644 index 000000000..56197047b --- /dev/null +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerCounterUnitTest.java @@ -0,0 +1,84 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. 
+* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. 
+* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.gatk.walkers.haplotypecaller; + +import org.broadinstitute.sting.BaseTest; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class KMerCounterUnitTest extends BaseTest { + @Test + public void testMyData() { + final KMerCounter counter = new KMerCounter(3); + + Assert.assertNotNull(counter.toString()); + + counter.addKmers( + "ATG", "ATG", "ATG", "ATG", + "ACC", "ACC", "ACC", + "AAA", "AAA", + "CTG", + "NNA", + "CCC" + ); + + testCounting(counter, "ATG", 4); + testCounting(counter, "ACC", 3); + testCounting(counter, "AAA", 2); + testCounting(counter, "CTG", 1); + testCounting(counter, "NNA", 1); + testCounting(counter, "CCC", 1); + testCounting(counter, "NNN", 0); + testCounting(counter, "NNC", 0); + + Assert.assertNotNull(counter.toString()); + } + + private void testCounting(final KMerCounter counter, final String in, final int expectedCount) { + Assert.assertEquals(counter.getKmerCount(in.getBytes()), expectedCount); + } +} diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java deleted file mode 100644 index f8a540b70..000000000 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/KMerErrorCorrectorUnitTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright (c) 2012 The Broad Institute - * - * Permission is hereby granted, free of charge, to any person - * obtaining a copy of this software and associated documentation - * files (the "Software"), to deal in the Software without - * restriction, including without limitation the rights to use, - * copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following - * conditions: - * - * The above copyright notice and this permission 
notice shall be - * included in all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR - * THE USE OR OTHER DEALINGS IN THE SOFTWARE. - */ - -package org.broadinstitute.sting.gatk.walkers.haplotypecaller; - -import org.broadinstitute.sting.BaseTest; -import org.testng.Assert; -import org.testng.annotations.Test; - -public class KMerErrorCorrectorUnitTest extends BaseTest { - @Test - public void testMyData() { - final KMerErrorCorrector corrector = new KMerErrorCorrector(3, 1, 2, 2); - - Assert.assertNotNull(corrector.toString()); - - corrector.addKmers( - "ATG", "ATG", "ATG", "ATG", - "ACC", "ACC", "ACC", - "AAA", "AAA", - "CTG", // -> ATG - "NNA", // -> AAA - "CCC", // => ACC - "NNN", // => null - "NNC" // => ACC [because of min count won't go to NNA] - ); - - testCorrection(corrector, "ATG", "ATG"); - testCorrection(corrector, "ACC", "ACC"); - testCorrection(corrector, "AAA", "AAA"); - testCorrection(corrector, "CTG", "ATG"); - testCorrection(corrector, "NNA", "AAA"); - testCorrection(corrector, "CCC", "ACC"); - testCorrection(corrector, "NNN", null); - testCorrection(corrector, "NNC", "ACC"); - - Assert.assertNotNull(corrector.toString()); - } - - private void testCorrection(final KMerErrorCorrector corrector, final String in, final String out) { - Assert.assertEquals(corrector.getErrorCorrectedKmer(in), out); - Assert.assertEquals(corrector.getErrorCorrectedKmer(in.getBytes()), out == null ? 
null : out.getBytes()); - } -} diff --git a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java index b38d6575e..2f4c1b55d 100644 --- a/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java +++ b/public/java/src/org/broadinstitute/sting/utils/activeregion/ActiveRegion.java @@ -149,7 +149,7 @@ public class ActiveRegion implements HasGenomeLocation { @Override public String toString() { - return "ActiveRegion " + activeRegionLoc.toString() + " active?=" + isActive() + " nReads=" + reads.size() + " "; + return "ActiveRegion " + activeRegionLoc.toString() + " active?=" + isActive() + " nReads=" + reads.size(); } /** @@ -374,6 +374,8 @@ public class ActiveRegion implements HasGenomeLocation { * * Note that the returned list may be empty, if this active region doesn't overlap the set at all * + * Note that the resulting regions are all empty, regardless of whether the current active region has reads + * * @param intervals a non-null set of intervals that are allowed * @return an ordered list of active region where each interval is contained within intervals */ @@ -383,14 +385,59 @@ public class ActiveRegion implements HasGenomeLocation { final List clippedRegions = new LinkedList(); for ( final GenomeLoc overlapping : allOverlapping ) { - final GenomeLoc subLoc = getLocation().intersect(overlapping); - final int subStart = subLoc.getStart() - getLocation().getStart(); - final int subEnd = subStart + subLoc.size(); - final List subStates = supportingStates.isEmpty() ? 
supportingStates : supportingStates.subList(subStart, subEnd); - final ActiveRegion clipped = new ActiveRegion( subLoc, subStates, isActive, genomeLocParser, extension ); - clippedRegions.add(clipped); + clippedRegions.add(trim(overlapping, extension)); } return clippedRegions; } + + /** + * Trim this active region to just the newExtent, producing a new active region without any reads that has only + * the extent of newExtent intersected with the current extent + * @param newExtent the new extent of the active region we want + * @param newExtension the extension size we want for the newly trimmed active region + * @return a non-null, empty active region + */ + public ActiveRegion trim(final GenomeLoc newExtent, final int newExtension) { + if ( newExtent == null ) throw new IllegalArgumentException("Active region extent cannot be null"); + + final GenomeLoc subLoc = getLocation().intersect(newExtent); + final int subStart = subLoc.getStart() - getLocation().getStart(); + final int subEnd = subStart + subLoc.size(); + final List subStates = supportingStates.isEmpty() ? supportingStates : supportingStates.subList(subStart, subEnd); + return new ActiveRegion( subLoc, subStates, isActive, genomeLocParser, newExtension ); + } + + /** + * Trim this active region to no more than the newExtent, producing a new active region without any reads that + * attempts to provide the best possible representation of this active region covering the newExtent. + * + * The challenge here is that newExtent may (1) be larger than can be represented by this active region + * + its original extension and (2) the extension must be symmetric on both sides. This algorithm + * therefore determines how best to represent newExtent as a subset of the span of this + * region with a padding value that captures as much of the newExtent as possible. 
+ * + * For example, suppose this active region is + * + * Active: 100-200 with extension of 50, so that the true span is 50-250 + * NewExtent: 150-225 saying that we'd ideally like to just have bases 150-225 + * + * Here we represent the active region as a active region from 150-200 with 25 bp of padding. + * + * The overall constraint is that the active region can never exceed the original active region, and + * the extension is chosen to maximize overlap with the desired region + * + * @param newExtent the new extend of the active region we want + * @return a non-null, empty active region + */ + public ActiveRegion trim(final GenomeLoc newExtent) { + if ( newExtent == null ) throw new IllegalArgumentException("Active region extent cannot be null"); + + final GenomeLoc subActive = getLocation().intersect(newExtent); + final int requiredOnRight = Math.max(newExtent.getStop() - subActive.getStop(), 0); + final int requiredOnLeft = Math.max(subActive.getStart() - newExtent.getStart(), 0); + final int requiredExtension = Math.min(Math.max(requiredOnLeft, requiredOnRight), getExtension()); + + return new ActiveRegion( subActive, Collections.emptyList(), isActive, genomeLocParser, requiredExtension ); + } } \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java index 1d33e328d..ab5f23894 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java @@ -390,4 +390,27 @@ public class EventMap extends TreeMap { return startPosKeySet; } + + private static class VariantContextComparator implements Comparator { + @Override + public int compare(VariantContext vc1, VariantContext vc2) { + return vc1.getStart() - vc2.getStart(); + } + } + + /** + * Get all of the VariantContexts in the event maps for all haplotypes, sorted by their start position + * 
@param haplotypes the set of haplotypes to grab the VCs from + * @return a sorted set of variant contexts + */ + public static TreeSet getAllVariantContexts( final List haplotypes ) { + // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file + final TreeSet vcs = new TreeSet(new VariantContextComparator()); + + for( final Haplotype h : haplotypes ) { + vcs.addAll(h.getEventMap().getVariantContexts()); + } + + return vcs; + } } diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java index 081fd14e0..bacee7942 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/Haplotype.java @@ -103,6 +103,40 @@ public class Haplotype extends Allele { this.genomeLocation = loc; } + /** + * Create a new Haplotype derived from this one that exactly spans the provided location + * + * Note that this haplotype must contain a genome loc for this operation to be successful. If no + * GenomeLoc is contained then an IllegalStateException is thrown + * + * Also loc must be fully contained within this Haplotype's genomeLoc. If not an IllegalArgumentException is + * thrown. + * + * @param loc a location completely contained within this Haplotype's location + * @return a new Haplotype with only the bases spanning the provided location, or null if for some reason the trimmed haplotype would be malformed + */ + public Haplotype trim(final GenomeLoc loc) { + if ( loc == null ) throw new IllegalArgumentException("Loc cannot be null"); + if ( genomeLocation == null ) throw new IllegalStateException("Cannot trim a Haplotype without containing GenomeLoc"); + if ( ! genomeLocation.containsP(loc) ) throw new IllegalArgumentException("Can only trim a Haplotype to a containing span. 
My loc is " + genomeLocation + " but wanted trim to " + loc); + if ( getCigar() == null ) throw new IllegalArgumentException("Cannot trim haplotype without a cigar " + this); + + final int newStart = loc.getStart() - this.genomeLocation.getStart(); + final int newStop = newStart + loc.size() - 1; + final byte[] newBases = AlignmentUtils.getBasesCoveringRefInterval(newStart, newStop, getBases(), 0, getCigar()); + final Cigar newCigar = AlignmentUtils.trimCigarByReference(getCigar(), newStart, newStop); + + if ( newBases == null || AlignmentUtils.startsOrEndsWithInsertionOrDeletion(newCigar) ) + // we cannot meaningfully chop down the haplotype, so return null + return null; + + final Haplotype ret = new Haplotype(newBases, isReference()); + ret.setCigar(newCigar); + ret.setGenomeLocation(loc); + ret.setAlignmentStartHapwrtRef(newStart + getAlignmentStartHapwrtRef()); + return ret; + } + @Override public boolean equals( Object h ) { return h instanceof Haplotype && Arrays.equals(getBases(), ((Haplotype) h).getBases()); @@ -126,6 +160,18 @@ public class Haplotype extends Allele { return getDisplayString(); } + /** + * Get the span of this haplotype (may be null) + * @return a potentially null genome loc + */ + public GenomeLoc getGenomeLocation() { + return genomeLocation; + } + + public void setGenomeLocation(GenomeLoc genomeLocation) { + this.genomeLocation = genomeLocation; + } + public long getStartPosition() { return genomeLocation.getStart(); } diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java index 9b25b00c6..2208302fb 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/AlignmentUtils.java @@ -48,6 +48,24 @@ public final class AlignmentUtils { // cannot be instantiated private AlignmentUtils() { } + /** + * Does cigar start or end with an insertion or deletion operation? 
+ * + * @param cigar a non-null cigar to test + * @return true if the first or last operator of cigar is an I or a D + */ + public static boolean startsOrEndsWithInsertionOrDeletion(final Cigar cigar) { + if ( cigar == null ) throw new IllegalArgumentException("Cigar cannot be null"); + + if ( cigar.isEmpty() ) + return false; + + final CigarOperator first = cigar.getCigarElement(0).getOperator(); + final CigarOperator last = cigar.getCigarElement(cigar.numCigarElements()-1).getOperator(); + return first == CigarOperator.D || first == CigarOperator.I || last == CigarOperator.D || last == CigarOperator.I; + } + + + /** + * Get the byte[] from bases that cover the reference interval refStart -> refEnd given the + * alignment of bases to the reference (basesToRefCigar) and the start offset of the bases on the reference + * @@ -55,6 +73,8 @@ public final class AlignmentUtils { * refStart and refEnd are 0 based offsets that we want to obtain. In the client code, if the reference * bases start at position X and you want Y -> Z, refStart should be Y - X and refEnd should be Z - X. * + * If refStart or refEnd would start or end the new bases within a deletion, this function will return null + * + * @param bases + * @param refStart + * @param refEnd + * @param basesStartOnRef 0-based offset on which bases starts on the reference. Note that basesStartOnRef can be + * 10 (meaning bases doesn't fully span the reference), which would be indicated by basesStartOnRef == 10. 
* It's not trivial to eliminate this parameter because it's tied up with the cigar * @param basesToRefCigar the cigar that maps the bases to the reference genome - * @return a non-null byte[] + * @return a byte[] containing the bases covering this interval, or null if we would start or end within a deletion */ public static byte[] getBasesCoveringRefInterval(final int refStart, final int refEnd, final byte[] bases, final int basesStartOnRef, final Cigar basesToRefCigar) { if ( refStart < 0 || refEnd < refStart ) throw new IllegalArgumentException("Bad start " + refStart + " and/or stop " + refEnd); @@ -74,33 +94,41 @@ public final class AlignmentUtils { int refPos = basesStartOnRef; int basesPos = 0; - int basesStart = -1; int basesStop = -1; boolean done = false; for ( int iii = 0; ! done && iii < basesToRefCigar.numCigarElements(); iii++ ) { final CigarElement ce = basesToRefCigar.getCigarElement(iii); - final int bInc, rInc; switch ( ce.getOperator() ) { - case I: bInc = 1; rInc = 0; break; - case M: case X: case EQ: bInc = rInc = 1; break; - case D: bInc = 0; rInc = 1; break; + case I: + basesPos += ce.getLength(); + break; + case M: case X: case EQ: + for ( int i = 0; i < ce.getLength(); i++ ) { + if ( refPos == refStart ) + basesStart = basesPos; + if ( refPos == refEnd ) { + basesStop = basesPos; + done = true; + break; + } + refPos++; + basesPos++; + } + break; + case D: + for ( int i = 0; i < ce.getLength(); i++ ) { + if ( refPos == refEnd || refPos == refStart ) { + // if we ever reach a ref position that is either a start or an end, we fail + return null; + } + refPos++; + } + break; default: throw new IllegalStateException("Unsupported operator " + ce); } - - for ( int i = 0; i < ce.getLength(); i++ ) { - if ( refPos == refStart ) - basesStart = basesPos; - if ( refPos == refEnd ) { - basesStop = basesPos; - done = true; - break; - } - refPos += rInc; - basesPos += bInc; - } } if ( basesStart == -1 || basesStop == -1 ) diff --git 
a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java index 7f0f93704..ad5fd3642 100644 --- a/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/activeregion/ActiveRegionUnitTest.java @@ -49,7 +49,7 @@ import java.util.*; public class ActiveRegionUnitTest extends BaseTest { - private final static boolean DEBUG = true; + private final static boolean DEBUG = false; private GenomeLocParser genomeLocParser; private IndexedFastaSequenceFile seq; private String contig; @@ -309,4 +309,75 @@ public class ActiveRegionUnitTest extends BaseTest { } } } + + // ----------------------------------------------------------------------------------------------- + // + // Make sure we can properly cut up an active region based on engine intervals + // + // ----------------------------------------------------------------------------------------------- + + @DataProvider(name = "TrimActiveRegionData") + public Object[][] makeTrimActiveRegionData() { + List tests = new ArrayList(); + + // fully enclosed within active region + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 15, 16), + genomeLocParser.createGenomeLoc("20", 15, 16), 0}); + + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 10, 15), + genomeLocParser.createGenomeLoc("20", 10, 15), 0}); + + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 15, 20), + genomeLocParser.createGenomeLoc("20", 15, 20), 0}); + + // needs extra padding on the right + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 15, 25), + genomeLocParser.createGenomeLoc("20", 15, 20), 
5}); + + // needs extra padding on the left + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 5, 15), + genomeLocParser.createGenomeLoc("20", 10, 15), 5}); + + // needs extra padding on both + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 7, 21), + genomeLocParser.createGenomeLoc("20", 10, 20), 3}); + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 9, 23), + genomeLocParser.createGenomeLoc("20", 10, 20), 3}); + + // desired span captures everything, so we're returning everything. Tests that extension is set correctly + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 10, 20), 10, + genomeLocParser.createGenomeLoc("20", 1, 50), + genomeLocParser.createGenomeLoc("20", 10, 20), 10}); + + // At the start of the chromosome, potentially a bit weird + tests.add(new Object[]{ + genomeLocParser.createGenomeLoc("20", 1, 10), 10, + genomeLocParser.createGenomeLoc("20", 1, 50), + genomeLocParser.createGenomeLoc("20", 1, 10), 10}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "TrimActiveRegionData") + public void testTrimActiveRegion(final GenomeLoc regionLoc, final int extension, final GenomeLoc desiredSpan, final GenomeLoc expectedActiveRegion, final int expectedExtension) { + final ActiveRegion region = new ActiveRegion(regionLoc, Collections.emptyList(), true, genomeLocParser, extension); + final ActiveRegion trimmed = region.trim(desiredSpan); + Assert.assertEquals(trimmed.getLocation(), expectedActiveRegion, "Incorrect region"); + Assert.assertEquals(trimmed.getExtension(), expectedExtension, "Incorrect region"); + } } \ No newline at end of file diff --git a/public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java 
index fe02aea9f..cfbc4a3e0 100644 --- a/public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/haplotype/HaplotypeUnitTest.java @@ -31,12 +31,15 @@ import net.sf.samtools.CigarElement; import net.sf.samtools.CigarOperator; import net.sf.samtools.TextCigarCodec; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.UnvalidatingGenomeLoc; import org.broadinstitute.sting.utils.haplotype.Haplotype; import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.variantcontext.VariantContextBuilder; import org.testng.Assert; import org.testng.annotations.BeforeClass; +import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.util.*; @@ -45,10 +48,6 @@ import java.util.*; * Basic unit test for Haplotype Class */ public class HaplotypeUnitTest extends BaseTest { - @BeforeClass - public void init() { - } - @Test public void testSimpleInsertionAllele() { final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA"; @@ -183,4 +182,68 @@ public class HaplotypeUnitTest extends BaseTest { Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(1).toString(), "1M3I1M"); Assert.assertEquals(makeHCForCigar("AGCT", "1M1I1I1I").getConsolidatedPaddedCigar(2).toString(), "1M3I2M"); } + + @DataProvider(name = "TrimmingData") + public Object[][] makeTrimmingData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20); + final String fullBases = "ACGTAACCGGT"; + for ( int trimStart = loc.getStart(); trimStart < loc.getStop(); trimStart++ ) { + for ( int trimStop = trimStart; trimStop <= loc.getStop(); trimStop++ ) { + final int start = 
trimStart - loc.getStart(); + final int stop = start + (trimStop - trimStart) + 1; + final GenomeLoc trimmedLoc = new UnvalidatingGenomeLoc("20", 0, start + loc.getStart(), stop + loc.getStart() - 1); + final String expectedBases = fullBases.substring(start, stop); + final Haplotype full = new Haplotype(fullBases.getBytes(), loc); + final Haplotype trimmed = new Haplotype(expectedBases.getBytes(), trimmedLoc); + + final int hapStart = 10; + full.setAlignmentStartHapwrtRef(hapStart); + full.setCigar(TextCigarCodec.getSingleton().decode(full.length() + "M")); + + trimmed.setAlignmentStartHapwrtRef(hapStart + start); + trimmed.setCigar(TextCigarCodec.getSingleton().decode(trimmed.length() + "M")); + + tests.add(new Object[]{full, trimmedLoc, trimmed}); + } + } + + final Haplotype full = new Haplotype("ACT".getBytes(), new UnvalidatingGenomeLoc("20", 0, 10, 14)); + full.setAlignmentStartHapwrtRef(10); + full.setCigar(TextCigarCodec.getSingleton().decode("1M2D2M")); + tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 11, 12), null}); + tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 10, 12), null}); + tests.add(new Object[]{full, new UnvalidatingGenomeLoc("20", 0, 11, 13), null}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "TrimmingData") + public void testTrim(final Haplotype full, final GenomeLoc trimTo, final Haplotype expected) { + final Haplotype actual = full.trim(trimTo); + if ( expected != null ) { + Assert.assertEquals(actual.getBases(), expected.getBases()); + Assert.assertEquals(actual.getStartPosition(), trimTo.getStart()); + Assert.assertEquals(actual.getStopPosition(), trimTo.getStop()); + Assert.assertEquals(actual.getCigar(), expected.getCigar()); + Assert.assertEquals(actual.getAlignmentStartHapwrtRef(), expected.getAlignmentStartHapwrtRef()); + } else { + Assert.assertNull(actual); + } + } + + @Test(expectedExceptions = IllegalArgumentException.class) + public void testBadTrimLoc() { + final 
GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20); + final Haplotype hap = new Haplotype("ACGTAACCGGT".getBytes(), loc); + hap.trim(new UnvalidatingGenomeLoc("20", 0, 1, 20)); + } + + @Test(expectedExceptions = IllegalStateException.class) + public void testBadTrimNoLoc() { + final Haplotype hap = new Haplotype("ACGTAACCGGT".getBytes()); + hap.trim(new UnvalidatingGenomeLoc("20", 0, 1, 20)); + } } diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java index 125450257..2a2d80206 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/AlignmentUtilsUnitTest.java @@ -948,4 +948,89 @@ public class AlignmentUtilsUnitTest { Assert.assertEquals(actualEndPos, pos + elt.getLength()); Assert.assertEquals(AlignmentUtils.consolidateCigar(new Cigar(elts)), expectedCigar); } + + @DataProvider(name = "GetBasesCoveringRefIntervalData") + public Object[][] makeGetBasesCoveringRefIntervalData() { + List tests = new ArrayList(); + + // matches + // 0123 + // ACGT + tests.add(new Object[]{"ACGT", 0, 3, "4M", "ACGT"}); + tests.add(new Object[]{"ACGT", 1, 3, "4M", "CGT"}); + tests.add(new Object[]{"ACGT", 1, 2, "4M", "CG"}); + tests.add(new Object[]{"ACGT", 1, 1, "4M", "C"}); + + // deletions + // 012345 + // AC--GT + tests.add(new Object[]{"ACGT", 0, 5, "2M2D2M", "ACGT"}); + tests.add(new Object[]{"ACGT", 1, 5, "2M2D2M", "CGT"}); + tests.add(new Object[]{"ACGT", 2, 5, "2M2D2M", null}); + tests.add(new Object[]{"ACGT", 3, 5, "2M2D2M", null}); + tests.add(new Object[]{"ACGT", 4, 5, "2M2D2M", "GT"}); + tests.add(new Object[]{"ACGT", 5, 5, "2M2D2M", "T"}); + tests.add(new Object[]{"ACGT", 0, 4, "2M2D2M", "ACG"}); + tests.add(new Object[]{"ACGT", 0, 3, "2M2D2M", null}); + tests.add(new Object[]{"ACGT", 0, 2, "2M2D2M", null}); + tests.add(new Object[]{"ACGT", 0, 1, 
"2M2D2M", "AC"}); + tests.add(new Object[]{"ACGT", 0, 0, "2M2D2M", "A"}); + + // insertions + // 01--23 + // ACTTGT + tests.add(new Object[]{"ACTTGT", 0, 3, "2M2I2M", "ACTTGT"}); + tests.add(new Object[]{"ACTTGT", 1, 3, "2M2I2M", "CTTGT"}); + tests.add(new Object[]{"ACTTGT", 2, 3, "2M2I2M", "GT"}); + tests.add(new Object[]{"ACTTGT", 3, 3, "2M2I2M", "T"}); + tests.add(new Object[]{"ACTTGT", 0, 2, "2M2I2M", "ACTTG"}); + tests.add(new Object[]{"ACTTGT", 0, 1, "2M2I2M", "AC"}); + tests.add(new Object[]{"ACTTGT", 1, 2, "2M2I2M", "CTTG"}); + tests.add(new Object[]{"ACTTGT", 2, 2, "2M2I2M", "G"}); + tests.add(new Object[]{"ACTTGT", 1, 1, "2M2I2M", "C"}); + + tests.add(new Object[]{"ACGT", 0, 1, "2M2I", "AC"}); + tests.add(new Object[]{"ACGT", 1, 1, "2M2I", "C"}); + tests.add(new Object[]{"ACGT", 0, 0, "2M2I", "A"}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "GetBasesCoveringRefIntervalData", enabled = true) + public void testGetBasesCoveringRefInterval(final String basesString, final int refStart, final int refEnd, final String cigarString, final String expected) { + final byte[] actualBytes = AlignmentUtils.getBasesCoveringRefInterval(refStart, refEnd, basesString.getBytes(), 0, TextCigarCodec.getSingleton().decode(cigarString)); + if ( expected == null ) + Assert.assertNull(actualBytes); + else + Assert.assertEquals(new String(actualBytes), expected); + } + + @DataProvider(name = "StartsOrEndsWithInsertionOrDeletionData") + public Object[][] makeStartsOrEndsWithInsertionOrDeletionData() { + List tests = new ArrayList(); + + tests.add(new Object[]{"2M", false}); + tests.add(new Object[]{"1D2M", true}); + tests.add(new Object[]{"2M1D", true}); + tests.add(new Object[]{"2M1I", true}); + tests.add(new Object[]{"1I2M", true}); + tests.add(new Object[]{"1M1I2M", false}); + tests.add(new Object[]{"1M1D2M", false}); + tests.add(new Object[]{"1M1I2M1I", true}); + tests.add(new Object[]{"1M1I2M1D", true}); + tests.add(new Object[]{"1D1M1I2M", 
true}); + tests.add(new Object[]{"1I1M1I2M", true}); + tests.add(new Object[]{"1M1I2M1I1M", false}); + tests.add(new Object[]{"1M1I2M1D1M", false}); + tests.add(new Object[]{"1M1D2M1D1M", false}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "StartsOrEndsWithInsertionOrDeletionData", enabled = true) + public void testStartsOrEndsWithInsertionOrDeletion(final String cigar, final boolean expected) { + Assert.assertEquals(AlignmentUtils.startsOrEndsWithInsertionOrDeletion(TextCigarCodec.getSingleton().decode(cigar)), expected); + } + + } From 9b5c55a84ab46a09471f79c0694f792af994fd58 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 3 Apr 2013 08:59:09 -0400 Subject: [PATCH 13/23] LikelihoodCalculationEngine will now only use reads longer than the minReadLength, which is currently fixed at 20 bp --- .../LikelihoodCalculationEngine.java | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index 543b23d9c..a90f8959d 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -69,6 +69,7 @@ public class LikelihoodCalculationEngine { private final byte constantGCP; private final boolean DEBUG; private final PairHMM pairHMM; + private final int minReadLength = 20; public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType ) { @@ -90,9 +91,16 @@ public class LikelihoodCalculationEngine { DEBUG = debug; } - public Map computeReadLikelihoods( final List haplotypes, final Map> perSampleReadList ) { - - final Map stratifiedReadMap = new HashMap(); + /** + * Initialize 
our pairHMM with parameters appropriate to the haplotypes and reads we're going to evaluate + * + * After calling this routine the PairHMM will be configured to best evaluate all reads in the samples + * against the set of haplotypes + * + * @param haplotypes a non-null list of haplotypes + * @param perSampleReadList a mapping from sample -> reads + */ + private void initializePairHMM(final List haplotypes, final Map> perSampleReadList) { int X_METRIC_LENGTH = 0; for( final Map.Entry> sample : perSampleReadList.entrySet() ) { for( final GATKSAMRecord read : sample.getValue() ) { @@ -108,13 +116,20 @@ public class LikelihoodCalculationEngine { // initialize arrays to hold the probabilities of being in the match, insertion and deletion cases pairHMM.initialize(X_METRIC_LENGTH, Y_METRIC_LENGTH); + } - // for each sample's reads + public Map computeReadLikelihoods( final List haplotypes, final Map> perSampleReadList ) { + // configure the HMM + initializePairHMM(haplotypes, perSampleReadList); + + // Add likelihoods for each sample's reads to our stratifiedReadMap + final Map stratifiedReadMap = new HashMap(); for( final Map.Entry> sampleEntry : perSampleReadList.entrySet() ) { //if( DEBUG ) { System.out.println("Evaluating sample " + sample + " with " + perSampleReadList.get( sample ).size() + " passing reads"); } // evaluate the likelihood of the reads given those haplotypes stratifiedReadMap.put(sampleEntry.getKey(), computeReadLikelihoods(haplotypes, sampleEntry.getValue())); } + return stratifiedReadMap; } @@ -128,6 +143,10 @@ public class LikelihoodCalculationEngine { final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); for( final GATKSAMRecord read : reads ) { + if ( read.getReadLength() < minReadLength ) + // don't consider any reads that have a read length < the minimum + continue; + final byte[] overallGCP = new byte[read.getReadLength()]; Arrays.fill( overallGCP, constantGCP ); // Is there a way to derive empirical 
estimates for this from the data? // NOTE -- must clone anything that gets modified here so we don't screw up future uses of the read From 15461567d77e9de85e339c5cbcd7ebf6357246a6 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Wed, 3 Apr 2013 18:47:59 -0400 Subject: [PATCH 14/23] HaplotypeCaller no longer uses reads with poor likelihoods w.r.t. any haplotype -- The previous likelihood calculation proceeds as normal, but after each read has been evaluated against each haplotype we go through the read / allele / likelihoods map and eliminate all reads that have poor fit to any of the haplotypes. This functionality stops us from making a particular type of error in the HC, where we have a haplotype that's very far from the reference allele but not the right true haplotype. All of the reads that are slightly closer to this FP haplotype than the reference previously generated enormous likelihoods in favor of this FP haplotype because they were closer to it than the reference, even if each read had many mismatches w.r.t. the FP haplotype (and so the FP haplotype was a bad model for the true underlying haplotype). 
--- .../LikelihoodCalculationEngine.java | 17 +++- .../PerReadAlleleLikelihoodMapUnitTest.java | 94 +++++++++++++++---- .../genotyper/PerReadAlleleLikelihoodMap.java | 58 ++++++++++++ 3 files changed, 151 insertions(+), 18 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java index a90f8959d..1fb873e81 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/LikelihoodCalculationEngine.java @@ -71,6 +71,13 @@ public class LikelihoodCalculationEngine { private final PairHMM pairHMM; private final int minReadLength = 20; + /** + * The expected rate of random sequencing errors for a read originating from its true haplotype. + * + * For example, if this is 0.01, then we'd expect 1 error per 100 bp. 
+ */ + private final double EXPECTED_ERROR_RATE_PER_BASE = 0.02; + public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType ) { switch (hmmType) { @@ -127,7 +134,14 @@ public class LikelihoodCalculationEngine { for( final Map.Entry> sampleEntry : perSampleReadList.entrySet() ) { //if( DEBUG ) { System.out.println("Evaluating sample " + sample + " with " + perSampleReadList.get( sample ).size() + " passing reads"); } // evaluate the likelihood of the reads given those haplotypes - stratifiedReadMap.put(sampleEntry.getKey(), computeReadLikelihoods(haplotypes, sampleEntry.getValue())); + final PerReadAlleleLikelihoodMap map = computeReadLikelihoods(haplotypes, sampleEntry.getValue()); + + final List removedReads = map.filterPoorlyModelledReads(EXPECTED_ERROR_RATE_PER_BASE); +// logger.info("Removed " + removedReads.size() + " reads because of bad likelihoods from sample " + sampleEntry.getKey()); +// for ( final GATKSAMRecord read : removedReads ) +// logger.info("\tRemoved " + read.getReadName()); + + stratifiedReadMap.put(sampleEntry.getKey(), map); } return stratifiedReadMap; @@ -170,6 +184,7 @@ public class LikelihoodCalculationEngine { perReadAlleleLikelihoodMap.add(read, alleleVersions.get(haplotype), log10l); } } + return perReadAlleleLikelihoodMap; } diff --git a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java index 84bdfd19b..c50849a54 100644 --- a/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMapUnitTest.java @@ -46,6 +46,8 @@ package org.broadinstitute.sting.utils.genotyper; +import net.sf.samtools.*; +import org.apache.commons.lang.ArrayUtils; import org.broadinstitute.sting.BaseTest; import 
org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.sting.utils.BaseUtils; @@ -54,33 +56,16 @@ import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.variant.variantcontext.Allele; import org.broadinstitute.sting.utils.Utils; import java.util.Map; import java.util.List; import org.testng.Assert; import org.testng.annotations.Test; import net.sf.picard.reference.IndexedFastaSequenceFile; -import net.sf.samtools.SAMFileHeader; -import net.sf.samtools.SAMFileReader; -import net.sf.samtools.SAMRecord; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; -import org.broadinstitute.sting.utils.Utils; -import org.broadinstitute.sting.utils.activeregion.ActiveRegion; -import org.broadinstitute.sting.utils.activeregion.ActivityProfileState; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; -import org.broadinstitute.sting.utils.pileup.PileupElement; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; -import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl; -import org.broadinstitute.sting.utils.sam.ArtificialBAMBuilder; import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils; -import org.broadinstitute.sting.utils.sam.GATKSAMRecord; -import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory; -import org.broadinstitute.variant.variantcontext.Allele; -import org.broadinstitute.variant.variantcontext.VariantContext; -import org.broadinstitute.variant.variantcontext.VariantContextBuilder; -import org.broadinstitute.variant.vcf.VCFCodec; import java.io.File; import java.io.FileNotFoundException; import java.util.*; @@ -235,7 +220,82 @@ public class PerReadAlleleLikelihoodMapUnitTest extends BaseTest { 
Assert.assertEquals(downsampledStrat.get(base_A).size(),(int) (pileup.depthOfCoverage()/2) - 1); Assert.assertEquals(downsampledStrat.get(base_C).size(),(int) (pileup.depthOfCoverage()/2)); Assert.assertEquals(downsampledStrat.get(base_T).size(),0); + } + + @DataProvider(name = "PoorlyModelledReadData") + public Object[][] makePoorlyModelledReadData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + tests.add(new Object[]{10, 0.1, false, Arrays.asList(0.0)}); + tests.add(new Object[]{10, 0.1, true, Arrays.asList(-10.0)}); + tests.add(new Object[]{10, 0.1, false, Arrays.asList(0.0, -10.0)}); + tests.add(new Object[]{10, 0.1, true, Arrays.asList(-5.0, -10.0)}); + tests.add(new Object[]{100, 0.1, false, Arrays.asList(-5.0, -10.0)}); + tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0)}); + tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -3.0)}); + tests.add(new Object[]{100, 0.01, false, Arrays.asList(-5.0, -10.0, -2.0)}); + tests.add(new Object[]{100, 0.01, true, Arrays.asList(-5.0, -10.0, -4.0)}); + tests.add(new Object[]{100, 0.001, true, Arrays.asList(-5.0, -10.0)}); + tests.add(new Object[]{100, 0.001, false, Arrays.asList(-5.0, -10.0, 0.0)}); + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "PoorlyModelledReadData") + public void testPoorlyModelledRead(final int readLen, final double maxErrorRatePerBase, final boolean expected, final List log10likelihoods) { + final byte[] bases = Utils.dupBytes((byte)'A', readLen); + final byte[] quals = Utils.dupBytes((byte) 30, readLen); + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, readLen + "M"); + + final PerReadAlleleLikelihoodMap map = new PerReadAlleleLikelihoodMap(); + final boolean actual = map.readIsPoorlyModelled(read, log10likelihoods, maxErrorRatePerBase); + Assert.assertEquals(actual, expected); + } + @DataProvider(name = 
"RemovingPoorlyModelledReadData") + public Object[][] makeRemovingPoorlyModelledReadData() { + List tests = new ArrayList(); + + // this functionality can be adapted to provide input data for whatever you might want in your data + final int readLen = 10; + for ( int nReads = 0; nReads < 4; nReads++ ) { + for ( int nBad = 0; nBad <= nReads; nBad++ ) { + final int nGood = nReads - nBad; + tests.add(new Object[]{readLen, nReads, nBad, nGood}); + } + } + + return tests.toArray(new Object[][]{}); + } + + @Test(dataProvider = "RemovingPoorlyModelledReadData") + public void testRemovingPoorlyModelledReads(final int readLen, final int nReads, final int nBad, final int nGood) { + final PerReadAlleleLikelihoodMap map = new PerReadAlleleLikelihoodMap(); + final Set goodReads = new HashSet(); + final Set badReads = new HashSet(); + for ( int readI = 0; readI < nReads; readI++ ) { + final boolean bad = readI < nBad; + final double likelihood = bad ? -100.0 : 0.0; + + final byte[] bases = Utils.dupBytes((byte)'A', readLen); + final byte[] quals = Utils.dupBytes((byte) 30, readLen); + + final Allele allele = Allele.create(Utils.dupString("A", readI+1)); + + final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(bases, quals, readLen + "M"); + read.setReadName("readName" + readI); + map.add(read, allele, likelihood); + (bad ? 
badReads : goodReads).add(read); + } + + final List removedReads = map.filterPoorlyModelledReads(0.01); + Assert.assertEquals(removedReads.size(), nBad, "nBad " + nBad + " nGood " + nGood); + Assert.assertEquals(new HashSet(removedReads), badReads, "nBad " + nBad + " nGood " + nGood); + Assert.assertEquals(map.size(), nGood, "nBad " + nBad + " nGood " + nGood); + Assert.assertTrue(map.getStoredElements().containsAll(goodReads), "nBad " + nBad + " nGood " + nGood); + Assert.assertEquals(map.getStoredElements().size(), nGood, "nBad " + nBad + " nGood " + nGood); } } \ No newline at end of file diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index 02618100d..201e3b9b4 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -251,4 +251,62 @@ public class PerReadAlleleLikelihoodMap { } return sb.toString(); } + + /** + * Remove reads from this map that are poorly modelled w.r.t. their per allele likelihoods + * + * Goes through each read in this map, and if it is poorly modelled removes it from the map. + * + * @see #readIsPoorlyModelled(org.broadinstitute.sting.utils.sam.GATKSAMRecord, java.util.Collection, double) + * for more information about the poorly modelled test. 
+ * + * @param maxErrorRatePerBase see equivalent parameter in #readIsPoorlyModelled + * @return the list of reads removed from this map because they are poorly modelled + */ + public List filterPoorlyModelledReads(final double maxErrorRatePerBase) { + final List removedReads = new LinkedList(); + final Iterator>> it = likelihoodReadMap.entrySet().iterator(); + while ( it.hasNext() ) { + final Map.Entry> record = it.next(); + if ( readIsPoorlyModelled(record.getKey(), record.getValue().values(), maxErrorRatePerBase) ) { + it.remove(); + removedReads.add(record.getKey()); + } + } + + return removedReads; + } + + /** + * Is this read poorly modelled by any of the alleles in this map? + * + * A read is poorly modeled when it's likelihood is below what would be expected for a read + * originating from one of the alleles given the maxErrorRatePerBase of the reads in general. + * + * This function makes a number of key assumptions. First, that the likelihoods reflect the total likelihood + * of the read. In other words, that the read would be fully explained by one of the alleles. This means + * that the allele should be something like the full haplotype from which the read might originate. + * + * It further assumes that each error in the read occurs with likelihood of -3 (Q30 confidence per base). So + * a read with a 10% error rate with Q30 bases that's 100 bp long we'd expect to see 10 real Q30 errors + * even against the true haplotype. So for this read to be well modelled by at least one allele we'd expect + * a likelihood to be >= 10 * -3. + * + * @param read the read we want to evaluate + * @param log10Likelihoods a list of the log10 likelihoods of the read against a set of haplotypes. + * @param maxErrorRatePerBase the maximum error rate we'd expect for this read per base, in real space. 
So + * 0.01 means a 1% error rate + * @return true if none of the log10 likelihoods imply that the read truly originated from one of the haplotypes + */ + protected boolean readIsPoorlyModelled(final GATKSAMRecord read, final Collection log10Likelihoods, final double maxErrorRatePerBase) { + final double maxErrorsForRead = Math.ceil(read.getReadLength() * maxErrorRatePerBase); + final double log10QualPerBase = -3.0; + final double log10MaxLikelihoodForTrueAllele = maxErrorsForRead * log10QualPerBase; + + for ( final double log10Likelihood : log10Likelihoods ) + if ( log10Likelihood >= log10MaxLikelihoodForTrueAllele ) + return false; + + return true; + } } From 5545c629f5680c3dce0fe414e19858970fc8a1d4 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 4 Apr 2013 10:24:12 -0400 Subject: [PATCH 15/23] Rename Utils to GraphUtils to avoid conflicts with the sting.Utils class; fix broken unit test in SharedVertexSequenceSplitterUnitTest --- .../haplotypecaller/graphs/CommonSuffixSplitter.java | 7 +++---- .../haplotypecaller/graphs/{Utils.java => GraphUtils.java} | 4 ++-- .../gatk/walkers/haplotypecaller/graphs/SeqGraph.java | 2 +- .../graphs/SharedVertexSequenceSplitter.java | 4 ++-- .../walkers/haplotypecaller/graphs/SeqGraphUnitTest.java | 7 ++++--- .../graphs/SharedVertexSequenceSplitterUnitTest.java | 4 ++-- 6 files changed, 14 insertions(+), 14 deletions(-) rename protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/{Utils.java => GraphUtils.java} (99%) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java index dabfbb322..371d5b7e3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/CommonSuffixSplitter.java @@ 
-48,7 +48,6 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import com.google.java.contract.Requires; -import java.io.File; import java.util.*; /** @@ -177,9 +176,9 @@ public class CommonSuffixSplitter { */ @Requires("!middleVertices.isEmpty()") protected static SeqVertex commonSuffix(final Collection middleVertices) { - final List kmers = Utils.getKmers(middleVertices); - final int min = Utils.minKmerLength(kmers); - final int suffixLen = Utils.compSuffixLen(kmers, min); + final List kmers = GraphUtils.getKmers(middleVertices); + final int min = GraphUtils.minKmerLength(kmers); + final int suffixLen = GraphUtils.compSuffixLen(kmers, min); final byte[] kmer = kmers.get(0); final byte[] suffix = Arrays.copyOfRange(kmer, kmer.length - suffixLen, kmer.length); return new SeqVertex(suffix); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Utils.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java similarity index 99% rename from protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Utils.java rename to protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java index 8cb272925..30c5be190 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Utils.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/GraphUtils.java @@ -60,8 +60,8 @@ import java.util.List; * Date: 3/25/13 * Time: 9:42 PM */ -final class Utils { - private Utils() {} +final class GraphUtils { + private GraphUtils() {} /** * Compute the maximum shared prefix length of list of bytes. 
diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index 4cc7aae2a..97969d098 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -70,7 +70,7 @@ public final class SeqGraph extends BaseGraph { * merging inappropriate head or tail nodes, which introduces large insertion / deletion events * as the merge operation creates a link among the non-linked sink / source vertices */ - private final static int MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES = 10; + protected final static int MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES = 10; /** * Construct an empty SeqGraph diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java index ca7faa444..f6ee4c3c3 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitter.java @@ -295,8 +295,8 @@ public class SharedVertexSequenceSplitter { min = Math.min(min, v.getSequence().length); } - final int prefixLen = Utils.compPrefixLen(kmers, min); - final int suffixLen = Utils.compSuffixLen(kmers, min - prefixLen); + final int prefixLen = GraphUtils.compPrefixLen(kmers, min); + final int suffixLen = GraphUtils.compSuffixLen(kmers, min - prefixLen); final byte[] kmer = kmers.get(0); final byte[] prefix = Arrays.copyOfRange(kmer, 0, prefixLen); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java index ca43ced69..42137e4e4 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraphUnitTest.java @@ -47,6 +47,7 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs; import org.broadinstitute.sting.BaseTest; +import org.broadinstitute.sting.utils.Utils; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; @@ -195,8 +196,8 @@ public class SeqGraphUnitTest extends BaseTest { final SeqGraph graph = new SeqGraph(); - SeqVertex pre1 = new SeqVertex("ACT"); - SeqVertex pre2 = new SeqVertex("AGT"); + SeqVertex pre1 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "CT"); + SeqVertex pre2 = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES) + "GT"); SeqVertex top = new SeqVertex("A"); SeqVertex middle1 = new SeqVertex("GC"); SeqVertex middle2 = new SeqVertex("TC"); @@ -282,7 +283,7 @@ public class SeqGraphUnitTest extends BaseTest { final SeqVertex newMiddle1 = new SeqVertex("G"); final SeqVertex newMiddle2 = new SeqVertex("T"); final SeqVertex newBottom = new SeqVertex("C" + bottom.getSequenceString()); - final SeqVertex newTop = new SeqVertex("A"); + final SeqVertex newTop = new SeqVertex(Utils.dupString("A", SeqGraph.MIN_COMMON_SEQUENCE_TO_MERGE_SOURCE_SINK_VERTICES)); final SeqVertex newTopDown1 = new SeqVertex("G"); final SeqVertex newTopDown2 = new SeqVertex("C"); final SeqVertex newTopBottomMerged = new SeqVertex("TA"); diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java index 0930d497f..2df783b19 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedVertexSequenceSplitterUnitTest.java @@ -98,10 +98,10 @@ public class SharedVertexSequenceSplitterUnitTest extends BaseTest { min = Math.min(min, s.length()); } - final int actualPrefixLen = org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.Utils.compPrefixLen(bytes, min); + final int actualPrefixLen = GraphUtils.compPrefixLen(bytes, min); Assert.assertEquals(actualPrefixLen, expectedPrefixLen, "Failed prefix test"); - final int actualSuffixLen = org.broadinstitute.sting.gatk.walkers.haplotypecaller.graphs.Utils.compSuffixLen(bytes, min - actualPrefixLen); + final int actualSuffixLen = GraphUtils.compSuffixLen(bytes, min - actualPrefixLen); Assert.assertEquals(actualSuffixLen, expectedSuffixLen, "Failed suffix test"); } From 9c7a35f73fe5bcff75abbad10af3065bf589e381 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 4 Apr 2013 15:15:10 -0400 Subject: [PATCH 16/23] HaplotypeCaller no longer creates haplotypes that involve cycles in the SeqGraph -- The kbest paths algorithm now takes an explicit set of starting and ending vertices, which is conceptually cleaner and works for either the cycle or no-cycle models. Allowing cycles can be re-enabled with an HC command line switch. 
--- .../haplotypecaller/DeBruijnAssembler.java | 14 ++- .../haplotypecaller/HaplotypeCaller.java | 7 +- .../haplotypecaller/graphs/BaseGraph.java | 24 ++++ .../haplotypecaller/graphs/KBestPaths.java | 119 +++++++++++------- .../walkers/haplotypecaller/graphs/Path.java | 13 ++ .../graphs/KBestPathsUnitTest.java | 75 +++++++---- 6 files changed, 183 insertions(+), 69 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java index 40a6a79e0..11701a73b 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/DeBruijnAssembler.java @@ -95,22 +95,25 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { private final boolean debug; private final boolean debugGraphTransformations; private final int minKmer; + private final boolean allowCyclesInKmerGraphToGeneratePaths; private final int onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms; protected DeBruijnAssembler() { - this(false, -1, 11); + this(false, -1, 11, false); } public DeBruijnAssembler(final boolean debug, final int debugGraphTransformations, - final int minKmer) { + final int minKmer, + final boolean allowCyclesInKmerGraphToGeneratePaths) { super(); this.debug = debug; this.debugGraphTransformations = debugGraphTransformations > 0; this.onlyBuildKmersOfThisSizeWhenDebuggingGraphAlgorithms = debugGraphTransformations; this.minKmer = minKmer; + this.allowCyclesInKmerGraphToGeneratePaths = allowCyclesInKmerGraphToGeneratePaths; } /** @@ -388,7 +391,12 @@ public class DeBruijnAssembler extends LocalAssemblyEngine { } for( final SeqGraph graph : graphs ) { - for ( final Path path : new KBestPaths().getKBestPaths(graph, NUM_BEST_PATHS_PER_KMER_GRAPH) ) { + final SeqVertex source = graph.getReferenceSourceVertex(); + 
final SeqVertex sink = graph.getReferenceSinkVertex(); + if ( source == null || sink == null ) throw new IllegalArgumentException("Both source and sink cannot be null but got " + source + " and sink " + sink + " for graph "+ graph); + + final KBestPaths pathFinder = new KBestPaths(allowCyclesInKmerGraphToGeneratePaths); + for ( final Path path : pathFinder.getKBestPaths(graph, NUM_BEST_PATHS_PER_KMER_GRAPH, source, sink) ) { // logger.info("Found path " + path); Haplotype h = new Haplotype( path.getBases() ); if( !returnHaplotypes.contains(h) ) { diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index bce179ee1..80276f7be 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -314,6 +314,11 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="trimActiveRegions", shortName="trimActiveRegions", doc="If specified, we will trim down the active region from the full region (active + extension) to just the active interval for genotyping", required = false) protected boolean trimActiveRegions = false; + @Hidden + @Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequence rather than just the shortest paths", required = false) + protected boolean allowCyclesInKmerGraphToGeneratePaths = false; + + // the UG engines private UnifiedGenotyperEngine UG_engine = null; private UnifiedGenotyperEngine UG_engine_simple_genotyper = null; @@ -424,7 +429,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem } // setup the assembler - assemblyEngine = new 
DeBruijnAssembler( DEBUG, debugGraphTransformations, minKmer); + assemblyEngine = new DeBruijnAssembler(DEBUG, debugGraphTransformations, minKmer, allowCyclesInKmerGraphToGeneratePaths); assemblyEngine.setErrorCorrectKmers(errorCorrectKmers); assemblyEngine.setPruneFactor(MIN_PRUNE_FACTOR); if ( graphWriter != null ) assemblyEngine.setGraphWriter(graphWriter); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java index 5d591fd5c..7ce57e2e7 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseGraph.java @@ -137,6 +137,30 @@ public class BaseGraph extends DefaultDirectedGraph getSources() { + final Set set = new LinkedHashSet(); + for ( final T v : vertexSet() ) + if ( isSource(v) ) + set.add(v); + return set; + } + + /** + * Get the set of sink vertices of this graph + * @return a non-null set + */ + public Set getSinks() { + final Set set = new LinkedHashSet(); + for ( final T v : vertexSet() ) + if ( isSink(v) ) + set.add(v); + return set; + } + /** * Pull out the additional sequence implied by traversing this node in the graph * @param v the vertex from which to pull out the additional base sequence diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java index 1dc712c67..466148588 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPaths.java @@ -50,10 +50,7 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.java.contract.Ensures; import 
java.io.Serializable; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; +import java.util.*; /** * Class for finding the K best paths (as determined by the sum of multiplicities of the edges) in a graph. @@ -63,7 +60,23 @@ import java.util.List; * Date: Mar 23, 2011 */ public class KBestPaths { - public KBestPaths() { } + private final boolean allowCycles; + + /** + * Create a new KBestPaths finder that follows cycles in the graph + */ + public KBestPaths() { + this(true); + } + + /** + * Create a new KBestPaths finder + * + * @param allowCycles should we allow paths that follow cycles in the graph? + */ + public KBestPaths(final boolean allowCycles) { + this.allowCycles = allowCycles; + } protected static class MyInt { public int val = 0; } @@ -78,31 +91,61 @@ public class KBestPaths { } /** - * @see #getKBestPaths(BaseGraph, int) retriving the first 1000 paths + * @see #getKBestPaths(BaseGraph, int) retrieving the best 1000 paths */ public List> getKBestPaths( final BaseGraph graph ) { return getKBestPaths(graph, 1000); } /** - * Traverse the graph and pull out the best k paths. - * Paths are scored via their comparator function. 
The default being PathComparatorTotalScore() - * @param graph the graph from which to pull paths - * @param k the number of paths to find - * @return a list with at most k top-scoring paths from the graph + * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) retrieving the first 1000 paths + * starting from all source vertices and ending with all sink vertices */ - @Ensures({"result != null", "result.size() <= k"}) public List> getKBestPaths( final BaseGraph graph, final int k ) { + return getKBestPaths(graph, k, graph.getSources(), graph.getSinks()); + } + + /** + * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) with k=1000 + */ + public List> getKBestPaths( final BaseGraph graph, final Set sources, final Set sinks ) { + return getKBestPaths(graph, 1000, sources, sinks); + } + + /** + * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) with k=1000 + */ + public List> getKBestPaths( final BaseGraph graph, final T source, final T sink ) { + return getKBestPaths(graph, 1000, source, sink); + } + + /** + * @see #getKBestPaths(BaseGraph, int, java.util.Set, java.util.Set) with singleton source and sink sets + */ + public List> getKBestPaths( final BaseGraph graph, final int k, final T source, final T sink ) { + return getKBestPaths(graph, k, Collections.singleton(source), Collections.singleton(sink)); + } + + /** + * Traverse the graph and pull out the best k paths. + * Paths are scored via their comparator function. 
The default being PathComparatorTotalScore() + * @param graph the graph from which to pull paths + * @param k the number of paths to find + * @param sources a set of vertices we want to start paths with + * @param sinks a set of vertices we want to end paths with + * @return a list with at most k top-scoring paths from the graph + */ + @Ensures({"result != null", "result.size() <= k"}) + public List> getKBestPaths( final BaseGraph graph, final int k, final Set sources, final Set sinks ) { if( graph == null ) { throw new IllegalArgumentException("Attempting to traverse a null graph."); } // a min max queue that will collect the best k paths final MinMaxPriorityQueue> bestPaths = MinMaxPriorityQueue.orderedBy(new PathComparatorTotalScore()).maximumSize(k).create(); // run a DFS for best paths - for ( final T v : graph.vertexSet() ) { - if ( graph.inDegreeOf(v) == 0 ) { - findBestPaths(new Path(v, graph), bestPaths, new MyInt()); - } + for ( final T source : sources ) { + final Path startingPath = new Path(source, graph); + findBestPaths(startingPath, sinks, bestPaths, new MyInt()); } // the MinMaxPriorityQueue iterator returns items in an arbitrary order, so we need to sort the final result @@ -111,9 +154,15 @@ public class KBestPaths { return toReturn; } - private void findBestPaths( final Path path, final MinMaxPriorityQueue> bestPaths, final MyInt n ) { - // did we hit the end of a path? - if ( allOutgoingEdgesHaveBeenVisited(path) ) { + /** + * Recursive algorithm to find the K best paths in the graph from the current path to any of the sinks + * @param path the current path progress + * @param sinks a set of nodes that are sinks. Will terminate and add a path if the last vertex of path is in this set + * @param bestPaths a path to collect completed paths. 
+ * @param n used to limit the search by tracking the number of vertices visited across all paths + */ + private void findBestPaths( final Path path, final Set sinks, final Collection> bestPaths, final MyInt n ) { + if ( sinks.contains(path.getLastVertex())) { bestPaths.add(path); } else if( n.val > 10000 ) { // do nothing, just return, as we've done too much work already @@ -122,31 +171,15 @@ public class KBestPaths { final ArrayList edgeArrayList = new ArrayList(path.getOutgoingEdgesOfLastVertex()); Collections.sort(edgeArrayList, new BaseEdge.EdgeWeightComparator()); for ( final BaseEdge edge : edgeArrayList ) { + final T target = path.getGraph().getEdgeTarget(edge); // make sure the edge is not already in the path - if ( path.containsEdge(edge) ) - continue; - - final Path newPath = new Path(path, edge); - n.val++; - findBestPaths(newPath, bestPaths, n); + final boolean alreadyVisited = allowCycles ? path.containsEdge(edge) : path.containsVertex(target); + if ( ! alreadyVisited ) { + final Path newPath = new Path(path, edge); + n.val++; + findBestPaths(newPath, sinks, bestPaths, n); + } } } } - - /** - * Have all of the outgoing edges of the final vertex been visited? - * - * I.e., are all outgoing vertices of the current path in the list of edges of the graph? 
- * - * @param path the path to test - * @return true if all the outgoing edges at the end of this path have already been visited - */ - private boolean allOutgoingEdgesHaveBeenVisited( final Path path ) { - for( final BaseEdge edge : path.getOutgoingEdgesOfLastVertex() ) { - if( !path.containsEdge(edge) ) { // TODO -- investigate allowing numInPath < 2 to allow cycles - return false; - } - } - return true; - } -} +} \ No newline at end of file diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java index 50ca91d41..252ae3449 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java @@ -148,6 +148,19 @@ public class Path { return edgesAsSet.contains(edge); } + /** + * Does this path contain the given vertex? + * + * @param v a non-null vertex + * @return true if v occurs within this path, false otherwise + */ + public boolean containsVertex(final T v) { + if ( v == null ) throw new IllegalArgumentException("Vertex cannot be null"); + + // TODO -- warning this is expensive. 
Need to do vertex caching + return getVertices().contains(v); + } + /** * Check that two paths have the same edges and total score * @param path the other path we might be the same as diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java index d20a0f778..3c6327842 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/KBestPathsUnitTest.java @@ -55,10 +55,7 @@ import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.List; +import java.util.*; /** * Created with IntelliJ IDEA. @@ -70,15 +67,13 @@ public class KBestPathsUnitTest { @DataProvider(name = "BasicPathFindingData") public Object[][] makeBasicPathFindingData() { List tests = new ArrayList(); -// for ( final int nStartNodes : Arrays.asList(1) ) { -// for ( final int nBranchesPerBubble : Arrays.asList(2) ) { -// for ( final int nEndNodes : Arrays.asList(1) ) { -// for ( final boolean addCycle : Arrays.asList(true) ) { - for ( final int nStartNodes : Arrays.asList(1, 2, 3) ) { - for ( final int nBranchesPerBubble : Arrays.asList(2, 3) ) { - for ( final int nEndNodes : Arrays.asList(1, 2, 3) ) { - for ( final boolean addCycle : Arrays.asList(true, false) ) { - tests.add(new Object[]{nStartNodes, nBranchesPerBubble, nEndNodes, addCycle}); + for ( final boolean allowCycles : Arrays.asList(false, true)) { + for ( final int nStartNodes : Arrays.asList(1, 2, 3) ) { + for ( final int nBranchesPerBubble : Arrays.asList(2, 3) ) { + for ( final int nEndNodes : Arrays.asList(1, 2, 3) ) { + for ( final boolean addCycle : Arrays.asList(true, false) ) { + 
tests.add(new Object[]{nStartNodes, nBranchesPerBubble, nEndNodes, addCycle, allowCycles}); + } } } } @@ -88,9 +83,9 @@ public class KBestPathsUnitTest { } private static int weight = 1; - final List createVertices(final SeqGraph graph, final int n, final SeqVertex source, final SeqVertex target) { + final Set createVertices(final SeqGraph graph, final int n, final SeqVertex source, final SeqVertex target) { final List seqs = Arrays.asList("A", "C", "G", "T"); - final List vertices = new LinkedList(); + final Set vertices = new LinkedHashSet(); for ( int i = 0; i < n; i++ ) { final SeqVertex v = new SeqVertex(seqs.get(i)); graph.addVertex(v); @@ -102,22 +97,22 @@ public class KBestPathsUnitTest { } @Test(dataProvider = "BasicPathFindingData", enabled = true) - public void testBasicPathFinding(final int nStartNodes, final int nBranchesPerBubble, final int nEndNodes, final boolean addCycle) { + public void testBasicPathFinding(final int nStartNodes, final int nBranchesPerBubble, final int nEndNodes, final boolean addCycle, final boolean allowCycles) { SeqGraph graph = new SeqGraph(); final SeqVertex middleTop = new SeqVertex("GTAC"); final SeqVertex middleBottom = new SeqVertex("ACTG"); graph.addVertices(middleTop, middleBottom); - final List starts = createVertices(graph, nStartNodes, null, middleTop); - final List bubbles = createVertices(graph, nBranchesPerBubble, middleTop, middleBottom); - final List ends = createVertices(graph, nEndNodes, middleBottom, null); + final Set starts = createVertices(graph, nStartNodes, null, middleTop); + final Set bubbles = createVertices(graph, nBranchesPerBubble, middleTop, middleBottom); + final Set ends = createVertices(graph, nEndNodes, middleBottom, null); if ( addCycle ) graph.addEdge(middleBottom, middleBottom); // enumerate all possible paths - final List> paths = new KBestPaths().getKBestPaths(graph); + final List> paths = new KBestPaths(allowCycles).getKBestPaths(graph, starts, ends); - final int expectedNumOfPaths = 
nStartNodes * nBranchesPerBubble * (addCycle ? 2 : 1) * nEndNodes; + final int expectedNumOfPaths = nStartNodes * nBranchesPerBubble * (addCycle && allowCycles ? 2 : 1) * nEndNodes; Assert.assertEquals(paths.size(), expectedNumOfPaths, "Didn't find the expected number of paths"); int lastScore = Integer.MAX_VALUE; @@ -128,11 +123,47 @@ public class KBestPathsUnitTest { // get the best path, and make sure it's the same as our optimal path overall final Path best = paths.get(0); - final List> justOne = new KBestPaths().getKBestPaths(graph, 1); + final List> justOne = new KBestPaths(allowCycles).getKBestPaths(graph, 1, starts, ends); Assert.assertEquals(justOne.size(), 1); Assert.assertTrue(justOne.get(0).pathsAreTheSame(best), "Best path from complete enumerate " + best + " not the same as from k = 1 search " + justOne.get(0)); } + @Test + public void testPathFindingComplexCycle() { + SeqGraph graph = new SeqGraph(); + + final SeqVertex v1 = new SeqVertex("A"); + final SeqVertex v2 = new SeqVertex("C"); + final SeqVertex v3 = new SeqVertex("G"); + final SeqVertex v4 = new SeqVertex("T"); + final SeqVertex v5 = new SeqVertex("AA"); + graph.addVertices(v1, v2, v3, v4, v5); + graph.addEdges(v1, v2, v3, v4, v5); + graph.addEdges(v3, v3); + graph.addEdges(v4, v2); + + // enumerate all possible paths + final List> paths = new KBestPaths(false).getKBestPaths(graph, v1, v5); + + Assert.assertEquals(paths.size(), 1, "Didn't find the expected number of paths"); + } + + @Test + public void testPathFindingCycleLastNode() { + SeqGraph graph = new SeqGraph(); + + final SeqVertex v1 = new SeqVertex("A"); + final SeqVertex v2 = new SeqVertex("C"); + final SeqVertex v3 = new SeqVertex("G"); + graph.addVertices(v1, v2, v3); + graph.addEdges(v1, v2, v3, v3); + + // enumerate all possible paths + final List> paths = new KBestPaths(false).getKBestPaths(graph, v1, v3); + + Assert.assertEquals(paths.size(), 1, "Didn't find the expected number of paths"); + } + @DataProvider(name = 
"BasicBubbleDataProvider") public Object[][] makeBasicBubbleDataProvider() { List tests = new ArrayList(); From 3a19266843788c17a2fa8d7e3bf9fb55d18f277b Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 4 Apr 2013 16:07:00 -0400 Subject: [PATCH 17/23] Fix residual merge conflicts --- .../haplotypecaller/graphs/SeqGraph.java | 6 +-- .../graphs/SharedSequenceMerger.java | 2 +- .../sting/utils/haplotype/EventMap.java | 46 +++++++++---------- 3 files changed, 27 insertions(+), 27 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java index 97969d098..8c78d8515 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SeqGraph.java @@ -350,7 +350,7 @@ public final class SeqGraph extends BaseGraph { protected class MergeDiamonds extends VertexBasedTransformer { @Override protected boolean tryToTransform(final SeqVertex top) { - final List middles = outgoingVerticesOf(top); + final Set middles = outgoingVerticesOf(top); if ( middles.size() <= 1 ) // we can only merge if there's at least two middle nodes return false; @@ -407,7 +407,7 @@ public final class SeqGraph extends BaseGraph { protected class MergeTails extends VertexBasedTransformer { @Override protected boolean tryToTransform(final SeqVertex top) { - final List tails = outgoingVerticesOf(top); + final Set tails = outgoingVerticesOf(top); if ( tails.size() <= 1 ) return false; @@ -495,7 +495,7 @@ public final class SeqGraph extends BaseGraph { protected class MergeHeadlessIncomingSources extends VertexBasedTransformer { @Override boolean tryToTransform(final SeqVertex bottom) { - final List incoming = incomingVerticesOf(bottom); + final Set incoming = incomingVerticesOf(bottom); if ( incoming.size() <= 1 ) return 
false; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java index 28734e505..1c53f2332 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/SharedSequenceMerger.java @@ -75,7 +75,7 @@ public class SharedSequenceMerger { if ( graph == null ) throw new IllegalArgumentException("graph cannot be null"); if ( ! graph.vertexSet().contains(v) ) throw new IllegalArgumentException("graph doesn't contain vertex " + v); - final List prevs = graph.incomingVerticesOf(v); + final Set prevs = graph.incomingVerticesOf(v); if ( ! canMerge(graph, v, prevs) ) return false; else { diff --git a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java index ab5f23894..752c880b9 100644 --- a/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/haplotype/EventMap.java @@ -1,27 +1,27 @@ /* -* Copyright (c) 2012 The Broad Institute -* -* Permission is hereby granted, free of charge, to any person -* obtaining a copy of this software and associated documentation -* files (the "Software"), to deal in the Software without -* restriction, including without limitation the rights to use, -* copy, modify, merge, publish, distribute, sublicense, and/or sell -* copies of the Software, and to permit persons to whom the -* Software is furnished to do so, subject to the following -* conditions: -* -* The above copyright notice and this permission notice shall be -* included in all copies or substantial portions of the Software. 
-* -* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR -* THE USE OR OTHER DEALINGS IN THE SOFTWARE. -*/ + * Copyright (c) 2012 The Broad Institute + * + * Permission is hereby granted, free of charge, to any person + * obtaining a copy of this software and associated documentation + * files (the "Software"), to deal in the Software without + * restriction, including without limitation the rights to use, + * copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following + * conditions: + * + * The above copyright notice and this permission notice shall be + * included in all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR + * THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ */ package org.broadinstitute.sting.utils.haplotype; From 5a54a4155a12b7a9a4531b0f635f4074cead7784 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Thu, 4 Apr 2013 20:29:17 -0400 Subject: [PATCH 18/23] Change key Haplotype default parameter values -- Extension increased to 200 bp -- Min prune factor defaults to 0 -- LD merging enabled by default for complex variants, only when there are 10+ samples for SNP + SNP merging -- Active region trimming enabled by default --- .../haplotypecaller/HaplotypeCaller.java | 23 +++++++++---------- 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index 80276f7be..a7aeadde6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -137,7 +137,7 @@ import java.util.*; @DocumentedGATKFeature( groupName = HelpConstants.DOCS_CAT_VARDISC, extraDocs = {CommandLineGATK.class} ) @PartitionBy(PartitionType.LOCUS) @BAQMode(ApplicationTime = ReadTransformer.ApplicationTime.FORBIDDEN) -@ActiveRegionTraversalParameters(extension=85, maxRegion=300) +@ActiveRegionTraversalParameters(extension=200, maxRegion=300) @ReadFilters({HCMappingQualityFilter.class}) @Downsample(by= DownsampleType.BY_SAMPLE, toCoverage=250) public class HaplotypeCaller extends ActiveRegionWalker implements AnnotatorCompatible { @@ -200,7 +200,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Advanced @Argument(fullName="minPruning", shortName="minPruning", doc = "The minimum allowed pruning factor in assembly graph. 
Paths with <= X supporting kmers are pruned from the graph", required = false) - protected int MIN_PRUNE_FACTOR = 1; + protected int MIN_PRUNE_FACTOR = 0; @Advanced @Argument(fullName="gcpHMM", shortName="gcpHMM", doc="Flat gap continuation penalty for use in the Pair HMM", required = false) @@ -284,6 +284,10 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="excludeAnnotation", shortName="XA", doc="One or more specific annotations to exclude", required=false) protected List annotationsToExclude = new ArrayList(Arrays.asList(new String[]{"SpanningDeletions", "TandemRepeatAnnotator"})); + @Advanced + @Argument(fullName="dontMergeVariantsViaLD", shortName="dontMergeVariantsViaLD", doc="If specified, we will include low quality bases when doing the assembly", required = false) + protected boolean dontMergeVariantsViaLD = false; + /** * Which groups of annotations to add to the output VCF file. See the VariantAnnotator -list argument to view available groups. 
*/ @@ -301,18 +305,13 @@ public class HaplotypeCaller extends ActiveRegionWalker implem @Argument(fullName="debugGraphTransformations", shortName="debugGraphTransformations", doc="If specified, we will write DOT formatted graph files out of the assembler for only this graph size", required = false) protected int debugGraphTransformations = -1; - // TODO -- not currently useful - @Hidden + @Hidden // TODO -- not currently useful @Argument(fullName="useLowQualityBasesForAssembly", shortName="useLowQualityBasesForAssembly", doc="If specified, we will include low quality bases when doing the assembly", required = false) protected boolean useLowQualityBasesForAssembly = false; @Hidden - @Argument(fullName="useNewLDMerger", shortName="useNewLDMerger", doc="If specified, we will include low quality bases when doing the assembly", required = false) - protected boolean useNewLDMerger = false; - - @Hidden - @Argument(fullName="trimActiveRegions", shortName="trimActiveRegions", doc="If specified, we will trim down the active region from the full region (active + extension) to just the active interval for genotyping", required = false) - protected boolean trimActiveRegions = false; + @Argument(fullName="dontTrimActiveRegions", shortName="donTrimActiveRegions", doc="If specified, we will not trim down the active region from the full region (active + extension) to just the active interval for genotyping", required = false) + protected boolean dontTrimActiveRegions = false; @Hidden @Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequenece rather than just the shortest paths", required = false) @@ -437,7 +436,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM ); - final LDMerger ldMerger = new 
LDMerger(DEBUG, useNewLDMerger ? 10 : 10, useNewLDMerger ? 1 : 10); + final LDMerger ldMerger = new LDMerger(DEBUG, dontMergeVariantsViaLD ? 10000000 : 10, dontMergeVariantsViaLD ? 10000000 : 1); genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, ldMerger ); @@ -640,7 +639,7 @@ public class HaplotypeCaller extends ActiveRegionWalker implem final List haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, paddedReferenceLoc, activeAllelesToGenotype ); - if ( trimActiveRegions ) { + if ( ! dontTrimActiveRegions ) { return trimActiveRegion(activeRegion, haplotypes, fullReferenceWithPadding, paddedReferenceLoc); } else { // we don't want to or cannot create a trimmed active region, so go ahead and use the old one From 6d22485a4cd3fa715a66c330b176ede0a42017b4 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 5 Apr 2013 13:50:03 -0400 Subject: [PATCH 20/23] Critical bugfix to ReduceRead functionality of the GATKSAMRecord -- The function getReducedCounts() was returning the undecoded reduced read tag, which looks like [10, 5, -1, -5] when the depths were [10, 15, 9, 5]. The only function that actually gave the real counts was getReducedCount(int i) which did the proper decoding. Now GATKSAMRecord decodes the tag into the proper depths vector so that getReduceCounts() returns what one reasonably expects it to, and getReduceCount(i) merely looks up the value at i. Added unit test to ensure this behavior going forward. -- Changed the name of setReducedCounts() to setReducedCountsTag as this function assumes that counts have already been encoded in the tag way. 
--- .../reducereads/SyntheticRead.java | 2 +- .../sting/utils/sam/GATKSAMRecord.java | 61 +++++++++++++++++-- .../utils/sam/GATKSAMRecordUnitTest.java | 8 +++ 3 files changed, 64 insertions(+), 7 deletions(-) diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SyntheticRead.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SyntheticRead.java index b1ac19f50..ae4366768 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SyntheticRead.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/compression/reducereads/SyntheticRead.java @@ -235,7 +235,7 @@ public class SyntheticRead { read.setReadBases(convertReadBases()); read.setMappingQuality((int) Math.ceil(mappingQuality / basesCountsQuals.size())); read.setReadGroup(readGroupRecord); - read.setReducedReadCounts(convertBaseCounts()); + read.setReducedReadCountsTag(convertBaseCounts()); if (hasIndelQualities) { read.setBaseQualities(convertInsertionQualities(), EventType.BASE_INSERTION); diff --git a/public/java/src/org/broadinstitute/sting/utils/sam/GATKSAMRecord.java b/public/java/src/org/broadinstitute/sting/utils/sam/GATKSAMRecord.java index 01f39a67b..0e672b3d7 100644 --- a/public/java/src/org/broadinstitute/sting/utils/sam/GATKSAMRecord.java +++ b/public/java/src/org/broadinstitute/sting/utils/sam/GATKSAMRecord.java @@ -345,24 +345,50 @@ public class GATKSAMRecord extends BAMRecord { // *** ReduceReads functions ***// /////////////////////////////////////////////////////////////////////////////// + /** + * Get the counts of the bases in this reduced read + * + * NOTE that this is not the value of the REDUCED_READ_CONSENSUS_TAG, which + * is encoded in a special way. This is the actual positive counts of the + * depth at each bases. So for a RR with a tag of: + * + * [10, 5, -1, -5] + * + * this function returns + * + * [10, 15, 9, 5] + * + * as one might expect. 
+ * + * @return a byte[] holding the depth of the bases in this reduced read, or null if this isn't a reduced read + */ public byte[] getReducedReadCounts() { if ( ! retrievedReduceReadCounts ) { - reducedReadCounts = getByteArrayAttribute(REDUCED_READ_CONSENSUS_TAG); + final byte[] tag = getByteArrayAttribute(REDUCED_READ_CONSENSUS_TAG); + if ( tag != null ) reducedReadCounts = decodeReadReadCounts(tag); retrievedReduceReadCounts = true; } return reducedReadCounts; } + /** + * Is this read a reduced read? + * @return true if yes + */ public boolean isReducedRead() { return getReducedReadCounts() != null; } /** - * Set the reduced read counts for this record to counts + * Set the reduced read counts tag for this record to counts + * + * WARNING -- this function assumes that counts is encoded as a difference in value count + * of count[i] - count[0]. It is not a straight counting of the bases in the read. + * * @param counts the count array */ - public void setReducedReadCounts(final byte[] counts) { + public void setReducedReadCountsTag(final byte[] counts) { retrievedReduceReadCounts = false; setAttribute(REDUCED_READ_CONSENSUS_TAG, counts); } @@ -374,9 +400,32 @@ public class GATKSAMRecord extends BAMRecord { * @return the number of bases corresponding to the i'th base of the reduced read */ public final byte getReducedCount(final int i) { - byte firstCount = getReducedReadCounts()[0]; - byte offsetCount = getReducedReadCounts()[i]; - return (i==0) ? firstCount : (byte) Math.min(firstCount + offsetCount, Byte.MAX_VALUE); + return getReducedReadCounts()[i]; + } + + /** + * Actually decode the consensus tag of a reduce read, returning a newly allocated + * set of values countsFromTag to be the real depth of cover at each base of the reduced read. + * + * for example, if the tag contains [10, 5, -1, -5], after running this function the + * byte[] will contain the true counts [10, 15, 9, 5]. + * + * as one might expect. 
+ * + * @param countsFromTag a non-null byte[] containing the tag encoded reduce reads counts + * @return a non-null byte[] containing the true depth values for the vector + */ + private byte[] decodeReadReadCounts(final byte[] countsFromTag) { + final int n = countsFromTag.length; + final byte[] result = new byte[n]; + final byte firstCount = countsFromTag[0]; + result[0] = firstCount; + for ( int i = 1; i < n; i++) { + final byte offsetCount = countsFromTag[i]; + result[i] = (byte) Math.min(firstCount + offsetCount, Byte.MAX_VALUE); + } + + return result; } /////////////////////////////////////////////////////////////////////////////// diff --git a/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java b/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java index 18a501b51..57a7946ae 100644 --- a/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java +++ b/public/java/test/org/broadinstitute/sting/utils/sam/GATKSAMRecordUnitTest.java @@ -134,4 +134,12 @@ public class GATKSAMRecordUnitTest extends BaseTest { read.setIsStrandless(true); read.setReadNegativeStrandFlag(true); } + + @Test + public void testGetReducedCountsIsCorrect() { + final byte[] counts = reducedRead.getReducedReadCounts(); + Assert.assertNotSame(counts, reducedRead.getAttribute(GATKSAMRecord.REDUCED_READ_CONSENSUS_TAG)); + for ( int i = 0; i < counts.length; i++ ) + Assert.assertEquals(counts[i], reducedRead.getReducedCount(i), "Reduced counts vector not equal to getReducedCount(i) at " + i); + } } From caf15fb7276485f11336e292e1a44dc912c0c70f Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Fri, 5 Apr 2013 14:11:06 -0400 Subject: [PATCH 21/23] Update MD5s to reflect new HC algorithms and parameter values --- ...lexAndSymbolicVariantsIntegrationTest.java | 6 +++--- .../HaplotypeCallerIntegrationTest.java | 21 +++++++++++-------- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git 
a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java index a891220c5..ff2b3d0b6 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java +++ b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest.java @@ -64,7 +64,7 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleComplex() { - HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "7b67ac6213b7a6f759057fb9d7148fdc"); + HCTestComplexVariants(privateTestDir + "AFR.complex.variants.bam", "", "80b9280b1e65952f60ba2fd738d4840f"); } private void HCTestSymbolicVariants(String bam, String args, String md5) { @@ -88,12 +88,12 @@ public class HaplotypeCallerComplexAndSymbolicVariantsIntegrationTest extends Wa @Test public void testHaplotypeCallerMultiSampleGGAComplex() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538", - "eb41ed6f1d692368a0f67311d139a38a"); + "125e93deeb3b390a14d9b777aa2a220f"); } @Test public void testHaplotypeCallerMultiSampleGGAMultiAllelic() { HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337", - "c4c33c962aca12c51def9b8cde35b7d2"); + "6957fd0e8a5bc66d2572a6ca8626fa7a"); } } diff --git a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java index 51c3296ac..5fc8c5622 100644 --- a/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java +++ 
b/protected/java/test/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCallerIntegrationTest.java @@ -47,12 +47,15 @@ package org.broadinstitute.sting.gatk.walkers.haplotypecaller; import net.sf.picard.reference.IndexedFastaSequenceFile; +import org.broad.tribble.TribbleIndexedFeatureReader; import org.broadinstitute.sting.WalkerTest; +import org.broadinstitute.sting.gatk.GenomeAnalysisEngine; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.variant.GATKVCFUtils; import org.broadinstitute.variant.variantcontext.VariantContext; +import org.broadinstitute.variant.vcf.VCFCodec; import org.testng.annotations.Test; import java.io.File; @@ -77,12 +80,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSample() { - HCTest(CEUTRIO_BAM, "", "f132843e3c8e065a783cc4fdf9ee5df3"); + HCTest(CEUTRIO_BAM, "", "6fa37c449a800bcd59069be03ad2fff2"); } @Test public void testHaplotypeCallerSingleSample() { - HCTest(NA12878_BAM, "", "15e0201f5c478310d278d2d03483c152"); + HCTest(NA12878_BAM, "", "6140447b34bd1d08b3ed4d473d2c2f23"); } @Test(enabled = false) // can't annotate the rsID's yet @@ -93,7 +96,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerMultiSampleGGA() { HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf", - "48d309aed0cdc40cc983eeb5a8d12f53"); + "cbd119f3d37a9af0b3539c13b8053bd9"); } @Test @@ -109,7 +112,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerSingleSampleIndelQualityScores() { - HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "34c7fcfe17a1d835e2dc403df9eb3591"); + 
HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "9eeeada2f7145adfe08f538aad704982"); } private void HCTestNearbySmallIntervals(String bam, String args, String md5) { @@ -146,7 +149,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void testHaplotypeCallerNearbySmallIntervals() { - HCTestNearbySmallIntervals(NA12878_BAM, "", "eae65d20836d6c6ebca9e25e33566f74"); + HCTestNearbySmallIntervals(NA12878_BAM, "", "16ecd2f282bcb10dc32e7f3fe714a000"); } // This problem bam came from a user on the forum and it spotted a problem where the ReadClipper @@ -156,14 +159,14 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { @Test public void HCTestProblematicReadsModifiedInActiveRegions() { final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, privateTestDir + "haplotype-problem-4.bam") + " --no_cmdline_in_header -o %s -minPruning 3 -L 4:49139026-49139965"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("a3d74040a4966bf7a04cbd4924970685")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("0689d2c202849fd05617648eaf429b9a")); executeTest("HCTestProblematicReadsModifiedInActiveRegions: ", spec); } @Test public void HCTestStructuralIndels() { final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, privateTestDir + "AFR.structural.indels.bam") + " --no_cmdline_in_header -o %s -minPruning 6 -L 20:8187565-8187800 -L 20:18670537-18670730"; - final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("40da88ed3722c512264b72db37f18720")); + final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("e8466846ca420bcbcd52b97f7a661aa3")); executeTest("HCTestStructuralIndels: ", spec); } @@ -185,7 +188,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void HCTestReducedBam() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller -R " + b37KGReference + " --no_cmdline_in_header -I " + 
privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1, - Arrays.asList("69b83d578c14ed32d08ce4e7ff8a8a18")); + Arrays.asList("e30b974b038293841e6be23c93ce76e1")); executeTest("HC calling on a ReducedRead BAM", spec); } @@ -193,7 +196,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest { public void testReducedBamWithReadsNotFullySpanningDeletion() { WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( "-T HaplotypeCaller -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297", 1, - Arrays.asList("0cae60d86a3f86854699217a30ece3e3")); + Arrays.asList("a913849c7ebdefb23ef9fa5ec05960fd")); executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec); } } From 21410690a224f96f1e949e316f8b7976f8fdaa41 Mon Sep 17 00:00:00 2001 From: Mark DePristo Date: Sat, 6 Apr 2013 14:08:26 -0400 Subject: [PATCH 22/23] Address reviewer comments --- .../haplotypecaller/GenotypingEngine.java | 12 +-- .../haplotypecaller/HaplotypeCaller.java | 11 ++- .../haplotypecaller/graphs/BaseVertex.java | 5 +- .../walkers/haplotypecaller/graphs/Path.java | 2 +- .../sting/utils/haplotype/LDMerger.java | 14 ++-- .../MergeVariantsAcrossHaplotypes.java | 79 +++++++++++++++++++ .../genotyper/PerReadAlleleLikelihoodMap.java | 2 +- 7 files changed, 103 insertions(+), 22 deletions(-) create mode 100644 protected/java/src/org/broadinstitute/sting/utils/haplotype/MergeVariantsAcrossHaplotypes.java diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java index abd502c2b..5fe98649f 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java +++ 
b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/GenotypingEngine.java @@ -59,7 +59,7 @@ import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; import org.broadinstitute.sting.utils.haplotype.EventMap; import org.broadinstitute.sting.utils.haplotype.Haplotype; -import org.broadinstitute.sting.utils.haplotype.LDMerger; +import org.broadinstitute.sting.utils.haplotype.MergeVariantsAcrossHaplotypes; import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.variant.GATKVariantContextUtils; import org.broadinstitute.variant.variantcontext.*; @@ -74,16 +74,16 @@ public class GenotypingEngine { private final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; private final static List noCall = new ArrayList(); // used to noCall all genotypes until the exact model is applied private final VariantAnnotatorEngine annotationEngine; - private final LDMerger ldMerger; + private final MergeVariantsAcrossHaplotypes crossHaplotypeEventMerger; public GenotypingEngine( final boolean DEBUG, final VariantAnnotatorEngine annotationEngine, final boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, - final LDMerger ldMerger) { + final MergeVariantsAcrossHaplotypes crossHaplotypeEventMerger) { this.DEBUG = DEBUG; this.annotationEngine = annotationEngine; this.USE_FILTERED_READ_MAP_FOR_ANNOTATIONS = USE_FILTERED_READ_MAP_FOR_ANNOTATIONS; noCall.add(Allele.NO_CALL); - this.ldMerger = ldMerger; + this.crossHaplotypeEventMerger = crossHaplotypeEventMerger; } /** @@ -247,8 +247,8 @@ public class GenotypingEngine { cleanUpSymbolicUnassembledEvents( haplotypes ); if ( !in_GGA_mode ) { - // if not in GGA mode and have at least 10 samples try to create MNP and complex events by looking at LD structure - final boolean mergedAnything = ldMerger.mergeConsecutiveEventsBasedOnLD( haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc ); + // run the event 
merger if we're not in GGA mode + final boolean mergedAnything = crossHaplotypeEventMerger.merge(haplotypes, haplotypeReadMap, startPosKeySet, ref, refLoc); if ( mergedAnything ) cleanUpSymbolicUnassembledEvents( haplotypes ); // the newly created merged events could be overlapping the unassembled events } diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java index a7aeadde6..c52892373 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/HaplotypeCaller.java @@ -77,10 +77,7 @@ import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.fragments.FragmentCollection; import org.broadinstitute.sting.utils.fragments.FragmentUtils; import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; -import org.broadinstitute.sting.utils.haplotype.EventMap; -import org.broadinstitute.sting.utils.haplotype.Haplotype; -import org.broadinstitute.sting.utils.haplotype.HaplotypeBaseComparator; -import org.broadinstitute.sting.utils.haplotype.LDMerger; +import org.broadinstitute.sting.utils.haplotype.*; import org.broadinstitute.sting.utils.haplotypeBAMWriter.HaplotypeBAMWriter; import org.broadinstitute.sting.utils.help.DocumentedGATKFeature; import org.broadinstitute.sting.utils.help.HelpConstants; @@ -436,9 +433,11 @@ public class HaplotypeCaller extends ActiveRegionWalker implem likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM ); - final LDMerger ldMerger = new LDMerger(DEBUG, dontMergeVariantsViaLD ? 10000000 : 10, dontMergeVariantsViaLD ? 10000000 : 1); + final MergeVariantsAcrossHaplotypes variantMerger = dontMergeVariantsViaLD + ? 
new MergeVariantsAcrossHaplotypes() + : new LDMerger(DEBUG, 10, 1); - genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, ldMerger ); + genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS, variantMerger ); if ( bamWriter != null ) haplotypeBAMWriter = HaplotypeBAMWriter.create(bamWriterType, bamWriter, getToolkit().getSAMFileHeader()); diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java index 65643a2cc..b075a69a6 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/BaseVertex.java @@ -58,7 +58,8 @@ import java.util.Arrays; */ public class BaseVertex { final byte[] sequence; - int cachedHashCode = -1; + private final static int UNASSIGNED_HASHCODE = -1; + int cachedHashCode = UNASSIGNED_HASHCODE; /** * Create a new sequence vertex with sequence @@ -129,7 +130,7 @@ public class BaseVertex { */ @Override public int hashCode() { - if ( cachedHashCode == -1 ) { + if ( cachedHashCode == UNASSIGNED_HASHCODE ) { cachedHashCode = Arrays.hashCode(sequence); } return cachedHashCode; diff --git a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java index 252ae3449..d91ec0e37 100644 --- a/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java +++ b/protected/java/src/org/broadinstitute/sting/gatk/walkers/haplotypecaller/graphs/Path.java @@ -157,7 +157,7 @@ public class Path { public boolean containsVertex(final T v) { if ( v == null ) throw new IllegalArgumentException("Vertex cannot be null"); - // TODO -- warning this is 
expense. Need to do vertex caching + // TODO -- warning this is expensive. Need to do vertex caching return getVertices().contains(v); } diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java b/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java index ea00a1901..bbedd1b1a 100644 --- a/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotype/LDMerger.java @@ -63,7 +63,7 @@ import java.util.*; * Date: 3/28/13 * Time: 6:17 PM */ -public class LDMerger { +public class LDMerger extends MergeVariantsAcrossHaplotypes { private final static Logger logger = Logger.getLogger(LDMerger.class); private final boolean DEBUG; @@ -71,6 +71,7 @@ public class LDMerger { private final int minSamplesToMergeOtherEvents; public LDMerger(boolean DEBUG, int minSamplesToMergeSNPs, int minSamplesToMergeOtherEvents) { + super(); this.DEBUG = DEBUG; this.minSamplesToMergeSNPs = minSamplesToMergeSNPs; this.minSamplesToMergeOtherEvents = minSamplesToMergeOtherEvents; @@ -98,11 +99,12 @@ public class LDMerger { * @param ref the reference bases * @param refLoc the span of the reference bases */ - public boolean mergeConsecutiveEventsBasedOnLD( final List haplotypes, - final Map haplotypeReadMap, - final TreeSet startPosKeySet, - final byte[] ref, - final GenomeLoc refLoc ) { + @Override + public boolean merge( final List haplotypes, + final Map haplotypeReadMap, + final TreeSet startPosKeySet, + final byte[] ref, + final GenomeLoc refLoc ) { if ( haplotypes == null ) throw new IllegalArgumentException("haplotypes cannot be null"); if ( haplotypeReadMap == null ) throw new IllegalArgumentException("haplotypeReadMap cannot be null"); if ( startPosKeySet == null ) throw new IllegalArgumentException("startPosKeySet cannot be null"); diff --git a/protected/java/src/org/broadinstitute/sting/utils/haplotype/MergeVariantsAcrossHaplotypes.java 
b/protected/java/src/org/broadinstitute/sting/utils/haplotype/MergeVariantsAcrossHaplotypes.java new file mode 100644 index 000000000..fc47807e0 --- /dev/null +++ b/protected/java/src/org/broadinstitute/sting/utils/haplotype/MergeVariantsAcrossHaplotypes.java @@ -0,0 +1,79 @@ +/* +* By downloading the PROGRAM you agree to the following terms of use: +* +* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY +* +* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). +* +* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and +* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. +* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: +* +* 1. DEFINITIONS +* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. +* +* 2. LICENSE +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. 
+* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. +* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. +* Copyright 2012 Broad Institute, Inc. 
+* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. +* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. +* +* 4. INDEMNIFICATION +* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. +* +* 5. NO REPRESENTATIONS OR WARRANTIES +* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
+* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. +* +* 6. ASSIGNMENT +* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. +* +* 7. MISCELLANEOUS +* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. +* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. +* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. +* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. +* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
+*/ + +package org.broadinstitute.sting.utils.haplotype; + +import org.broadinstitute.sting.utils.GenomeLoc; +import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap; + +import java.util.List; +import java.util.Map; +import java.util.TreeSet; + +/** + * Baseclass for code that wants to merge variants together in the haplotype caller + * + * This root class is basically a no-op, and can be used to not do any merging + */ +public class MergeVariantsAcrossHaplotypes { + /** + * Merge variants across the haplotypes, updating the haplotype event maps and startPos set as appropriate + * + * @param haplotypes a list of haplotypes whose events we want to merge + * @param haplotypeReadMap map from sample name -> read likelihoods for each haplotype + * @param startPosKeySet a set of starting positions of all events among the haplotypes + * @param ref the reference bases + * @param refLoc the span of the reference bases + * @return true if anything was merged + */ + public boolean merge( final List haplotypes, + final Map haplotypeReadMap, + final TreeSet startPosKeySet, + final byte[] ref, + final GenomeLoc refLoc ) { + return false; + } +} diff --git a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java index 201e3b9b4..47be30871 100644 --- a/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java +++ b/public/java/src/org/broadinstitute/sting/utils/genotyper/PerReadAlleleLikelihoodMap.java @@ -278,7 +278,7 @@ public class PerReadAlleleLikelihoodMap { } /** - * Is this read poorly modelled by any of the alleles in this map? + * Is this read poorly modelled by all of the alleles in this map? * * A read is poorly modeled when it's likelihood is below what would be expected for a read * originating from one of the alleles given the maxErrorRatePerBase of the reads in general.