diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardCallerArgumentCollection.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardCallerArgumentCollection.java index 653b438ec..7c69ab014 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardCallerArgumentCollection.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/engine/arguments/StandardCallerArgumentCollection.java @@ -99,6 +99,11 @@ public class StandardCallerArgumentCollection implements Cloneable { @Argument(fullName = "contamination_fraction_per_sample_file", shortName = "contaminationFile", doc = "Tab-separated File containing fraction of contamination in sequencing data (per sample) to aggressively remove. Format should be \"\" (Contamination is double) per line; No header.", required = false) public File CONTAMINATION_FRACTION_FILE = null; + /** + * Indicates whether there is some sample contamination present. 
+ */ + private boolean sampleContaminationWasLoaded = false; + /** * * @return an _Immutable_ copy of the Sample-Contamination Map, defaulting to CONTAMINATION_FRACTION so that if the sample isn't in the map map(sample)==CONTAMINATION_FRACTION @@ -106,15 +111,32 @@ public class StandardCallerArgumentCollection implements Cloneable { public Map getSampleContamination(){ //make sure that the default value is set up right sampleContamination.setDefaultValue(CONTAMINATION_FRACTION); + if (!Double.isNaN(CONTAMINATION_FRACTION) && CONTAMINATION_FRACTION > 0.0) + sampleContaminationWasLoaded = true; return Collections.unmodifiableMap(sampleContamination); } public void setSampleContamination(DefaultHashMap sampleContamination) { this.sampleContamination.clear(); + this.sampleContaminationWasLoaded = !Double.isNaN(CONTAMINATION_FRACTION) && CONTAMINATION_FRACTION > 0.0; + if (!sampleContaminationWasLoaded) + for (final Double d : sampleContamination.values()) + if (!Double.isNaN(d) && d > 0.0) { + sampleContaminationWasLoaded = true; + break; + } this.sampleContamination.putAll(sampleContamination); this.sampleContamination.setDefaultValue(CONTAMINATION_FRACTION); } + /** + * Returns true if there is some sample contamination present, false otherwise. 
+ * @return {@code true} iff there is some sample contamination + */ + public boolean isSampleContaminationPresent() { + return (!Double.isNaN(CONTAMINATION_FRACTION) && CONTAMINATION_FRACTION > 0.0) || sampleContaminationWasLoaded; + } + //Needs to be here because it uses CONTAMINATION_FRACTION private DefaultHashMap sampleContamination = new DefaultHashMap(CONTAMINATION_FRACTION); diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngine.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngine.java index cfb1b957a..7ba81ca8c 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngine.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngine.java @@ -1,58 +1,58 @@ /* * By downloading the PROGRAM you agree to the following terms of use: -* +* * BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* +* * This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). -* +* * WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and * WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. * NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* +* * 1. 
DEFINITIONS * 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* +* * 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. * The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. * 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. 
LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY * LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. * Copyright 2012 Broad Institute, Inc. * Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. * LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* +* * 4. 
INDEMNIFICATION * LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* +* * 5. NO REPRESENTATIONS OR WARRANTIES * THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. * IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* +* * 6. ASSIGNMENT * This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* +* * 7. MISCELLANEOUS * 7.1 Export Control. 
LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. * 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. * 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. 
This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. * 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. * 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. 
*/ + package org.broadinstitute.gatk.tools.walkers.haplotypecaller; import org.apache.log4j.Logger; import org.broadinstitute.gatk.tools.walkers.haplotypecaller.graphs.SeqGraph; import org.broadinstitute.gatk.utils.activeregion.ActiveRegion; -import org.broadinstitute.gatk.utils.exceptions.GATKException; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; -import org.broadinstitute.gatk.utils.pairhmm.FlexibleHMM; import org.broadinstitute.gatk.utils.pairhmm.FastLoglessPairHMM; +import org.broadinstitute.gatk.utils.pairhmm.FlexibleHMM; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; import java.io.File; @@ -112,17 +112,16 @@ public class GraphBasedLikelihoodCalculationEngine implements ReadLikelihoodCalc debugMode = debugHaplotypeGraphAndLikelihoods ? DebugMode.EXTRA_DEBUG : debug ? DebugMode.DEBUG : DebugMode.NONE; } - @Override - public Map computeReadLikelihoods(final AssemblyResultSet assemblyResultSet, final Map> perSampleReadList) { + public ReadLikelihoods computeReadLikelihoods(final AssemblyResultSet assemblyResultSet, final List samples, final Map> perSampleReadList) { final GraphBasedLikelihoodCalculationEngineInstance graphLikelihoodEngine = new GraphBasedLikelihoodCalculationEngineInstance(assemblyResultSet, hmm,log10GlobalReadMismappingRate,heterogeneousKmerSizeResolution); final List haplotypes = assemblyResultSet.getHaplotypeList(); final List supportedHaplotypes = graphLikelihoodEngine.getHaplotypeList(); - if (supportedHaplotypes.size() != haplotypes.size()) logger.warn("Some haplotypes were drop due to missing route on the graph (supported / all): " + supportedHaplotypes.size() + "/" + haplotypes.size()); - final Map result = graphLikelihoodEngine.computeReadLikelihoods(supportedHaplotypes, - perSampleReadList ); + if (supportedHaplotypes.size() != haplotypes.size()) + logger.warn("Some 
haplotypes were drop due to missing route on the graph (supported / all): " + supportedHaplotypes.size() + "/" + haplotypes.size()); + final ReadLikelihoods result = graphLikelihoodEngine.computeReadLikelihoods(supportedHaplotypes,samples,perSampleReadList); if (debugMode != DebugMode.NONE) graphLikelihoodDebugDumps(assemblyResultSet.getRegionForGenotyping(), graphLikelihoodEngine,result); return result; } @@ -131,7 +130,7 @@ public class GraphBasedLikelihoodCalculationEngine implements ReadLikelihoodCalc * A few debug messages associated with the GraphBased likelihoods engine. */ private void graphLikelihoodDebugDumps(final ActiveRegion originalActiveRegion, final GraphBasedLikelihoodCalculationEngineInstance graphLikelihoodEngine, - final Map result) { + final ReadLikelihoods result) { if (graphLikelihoodEngine.hasCycles()) logger.debug("Resulting haplotype graph combining several kmer sizes has cycles"); else if (graphLikelihoodEngine.haplotypeGraph.hasNonReferenceEnds()) @@ -144,14 +143,14 @@ public class GraphBasedLikelihoodCalculationEngine implements ReadLikelihoodCalc sq.simplifyGraph(); sq.printGraph(new File(originalActiveRegion.getLocation() + "-" + graphLikelihoodEngine.getKmerSize() + "-haplotypeSeqGraph.dot"), 10000); try { - FileWriter fw = new FileWriter(new File(originalActiveRegion.getLocation() + "-likelihoods.txt")); - PrintWriter pw = new PrintWriter(fw); + final FileWriter fw = new FileWriter(new File(originalActiveRegion.getLocation() + "-likelihoods.txt")); + final PrintWriter pw = new PrintWriter(fw); //Note: we only output the first sample likelihoods, perhaps should output all of them but for debugging this is normally what is needed. - pw.println(result.entrySet().iterator().next().getValue().toString()); + pw.println(result.sampleMatrix(0)); // need to actually implement a proper toString for the SampleMatrix. 
pw.close(); fw.close(); - } catch (Exception ex) { - throw new GATKException("", ex); + } catch (final Exception ex) { + throw new IllegalStateException("", ex); } } } diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngineInstance.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngineInstance.java index 50e12842a..7d4b3db1a 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngineInstance.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/GraphBasedLikelihoodCalculationEngineInstance.java @@ -57,6 +57,7 @@ import org.broadinstitute.gatk.utils.Utils; import org.broadinstitute.gatk.utils.collections.CountSet; import org.broadinstitute.gatk.utils.collections.Pair; import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.pairhmm.FlexibleHMM; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; @@ -215,6 +216,7 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * * @return {@code true} iff so. */ + @SuppressWarnings("unused") public boolean hasVariation() { return hasVariation; } @@ -231,27 +233,24 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * @return never {@code null}, and with at least one entry for input sample (keys in {@code perSampleReadList}. * The value maps can be potentially empty though. 
*/ - public Map computeReadLikelihoods( - final List haplotypes, + public ReadLikelihoods computeReadLikelihoods(final List haplotypes, final List samples, final Map> perSampleReadList) { // General preparation on the input haplotypes: - Collections.sort(haplotypes, Haplotype.ALPHANUMERICAL_COMPARATOR); - final Map alleleVersions = new LinkedHashMap<>(haplotypes.size()); - for (final Haplotype haplotype : haplotypes) - alleleVersions.put(haplotype, Allele.create(haplotype,haplotype.isReference())); + final ReadLikelihoods result = new ReadLikelihoods<>(samples, haplotypes, perSampleReadList); + final List sortedHaplotypes = new ArrayList<>(haplotypes); + Collections.sort(sortedHaplotypes, Haplotype.ALPHANUMERICAL_COMPARATOR); // The actual work: - final HashMap result = new HashMap<>(perSampleReadList.size()); - for (final Map.Entry> e : perSampleReadList.entrySet()) { - final String sample = e.getKey(); - final List reads = e.getValue(); - final Set mayNeedAdjustment = new HashSet<>(reads.size()); + final int sampleCount = result.sampleCount(); + for (int s = 0; s < sampleCount; s++) { + final List sampleReads = result.sampleReads(s); + // Get the cost/likelihood of each read at relevant subpaths on the tree: - final Map> costsByEndingVertex = calculatePathCostsByRead(reads, mayNeedAdjustment); + final Map> costsByEndingVertex = calculatePathCostsByRead(sampleReads); // Create the resulting per-read maps: - final PerReadAlleleLikelihoodMap prallm = calculatePerReadAlleleLikelihoodMap(haplotypes, costsByEndingVertex, alleleVersions); - result.put(sample, prallm); + calculatePerReadAlleleLikelihoodMap(costsByEndingVertex,result.sampleMatrix(s) ); } + result.normalizeLikelihoods(true,log10globalReadMismappingRate); logger.debug("Likelihood analysis summary: reads anchored " + anchoredReads + "/" + (anchoredReads + nonAnchoredReads) + ""); return result; } @@ -263,8 +262,7 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * @param fileName name of the 
output file. */ public void printGraph(final String fileName) { - if (haplotypeGraph != null) - haplotypeGraph.printGraph(fileName); + haplotypeGraph.printGraph(fileName); } /** @@ -281,36 +279,24 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * * @return {@code true} iff so. */ + @SuppressWarnings("unused") public boolean hasCycles() { - // It is set to null if it contained cycles. - return haplotypeGraph == null; + return haplotypeGraph.hasCycles(); } /** * Builds the result per-read allele likelihood map. * - * @param haplotypes haplotypes to process. - * @param costsEndingByVertex Read vs haplotype graph subpaths cost indexed by ending vertex. - * @param alleleVersions map between haplotypes and the corresponding allele. - * @return never {@code null} although perhaps empty. + * @param costsEndingByVertex Read vs haplotype graph sub-paths cost indexed by ending vertex. + * @param likelihoods matrix where to set the likelihoods where the first index in the haplotype's and the second + * the read. 
*/ - protected PerReadAlleleLikelihoodMap calculatePerReadAlleleLikelihoodMap( - final Collection haplotypes, - final Map> costsEndingByVertex, final Map alleleVersions) { - - final PerReadAlleleLikelihoodMap result = new PerReadAlleleLikelihoodMap(); - if (haplotypeGraph == null) - return result; - final Map maxAlleleLogLk = new HashMap<>(anchoredReads + nonAnchoredReads + 10); - final Set supportedHaplotypes = new LinkedHashSet<>(haplotypeGraph.getHaplotypes()); - supportedHaplotypes.retainAll(haplotypes); - for (final Haplotype haplotype : supportedHaplotypes) - calculatePerReadAlleleLikelihoodMapHaplotypeProcessing(haplotype, alleleVersions, result, maxAlleleLogLk, costsEndingByVertex); - - makeLikelihoodAdjustment(alleleVersions, result, maxAlleleLogLk.keySet(), maxAlleleLogLk); - applyGlobalReadMismappingRate(alleleVersions, result, maxAlleleLogLk); - return result; + protected void calculatePerReadAlleleLikelihoodMap(final Map> costsEndingByVertex, + final ReadLikelihoods.Matrix likelihoods) { + final int alleleCount = likelihoods.alleleCount(); + for (int h = 0; h < alleleCount; h++) + calculatePerReadAlleleLikelihoodMapHaplotypeProcessing(h, likelihoods, costsEndingByVertex); } /** @@ -322,25 +308,24 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * "likelihood". *

* - * @param haplotype the target haplotype - * @param alleleVersions allele version of the haplotypes. These are the ones to be used in the final output. - * @param result target where to add the read-vs-haplotype likelihoods. - * @param maxAlleleLogLk where to place the maximum likelihood achieve on any haplotype for each read. + * @param haplotypeIndex the target haplotype index in the {@code likelihoods} matrix. + * @param likelihoods matrix of likelihoods. * @param costsEndingByVertex read costs assorted by their end vertex. */ - private void calculatePerReadAlleleLikelihoodMapHaplotypeProcessing(final Haplotype haplotype, - final Map alleleVersions, - final PerReadAlleleLikelihoodMap result, - final Map maxAlleleLogLk, + private void calculatePerReadAlleleLikelihoodMapHaplotypeProcessing(final int haplotypeIndex, + final ReadLikelihoods.Matrix likelihoods, final Map> costsEndingByVertex) { + final Haplotype haplotype = likelihoods.allele(haplotypeIndex); final HaplotypeRoute haplotypeRoute = haplotypeGraph.getHaplotypeRoute(haplotype); final Set haplotypeVertices = haplotypeRoute.vertexSet(); final Map readCostByRead = new HashMap<>(); final Set visitedVertices = new HashSet<>(haplotypeVertices.size()); final List edgeList = haplotypeRoute.getEdges(); + MultiDeBruijnVertex currentVertex = haplotypeRoute.getFirstVertex(); Route pathSoFar = new Route<>(currentVertex, haplotypeGraph); final Iterator edgeIterator = edgeList.iterator(); + while (true) { visitedVertices.add(currentVertex); final Set finishingAtElementCostSet = costsEndingByVertex.get(currentVertex); @@ -351,15 +336,12 @@ public class GraphBasedLikelihoodCalculationEngineInstance { currentVertex = pathSoFar.getLastVertex(); } - final List readCosts = new ArrayList<>(readCostByRead.values()); - Collections.sort(readCosts, ReadCost.COMPARATOR); - for (final ReadCost rc : readCosts) - result.add(rc.read, alleleVersions.get(haplotype), rc.getCost()); - - for (final ReadCost rc : readCosts) { - final 
Double currentMax = maxAlleleLogLk.get(rc.read); - if (currentMax == null || currentMax < rc.getCost()) - maxAlleleLogLk.put(rc.read, rc.getCost()); + int readIndex = 0; + for (final GATKSAMRecord read : likelihoods.reads()) { + final ReadCost rc = readCostByRead.get(read); + //if (rc != null) + likelihoods.set(haplotypeIndex,readIndex,rc == null ? Double.NEGATIVE_INFINITY : rc.getCost()); + readIndex++; } } @@ -443,33 +425,6 @@ public class GraphBasedLikelihoodCalculationEngineInstance { } } - /** - * Makes sure that the reference allele likelihood is not too much smaller that the best alternative allele. - * The justification of this constraint is explained in - * {@link PairHMMLikelihoodCalculationEngine#computeDiploidHaplotypeLikelihoods}. - * - * @param alleleVersions correspondence between input haplotypes and output alleles. - * @param result the target result map. - * @param maxAlleleLogLk for each read indicates the likelihood of the best alternative allele. - */ - private void applyGlobalReadMismappingRate(final Map alleleVersions, - final PerReadAlleleLikelihoodMap result, - final Map maxAlleleLogLk) { - if (!Double.isNaN(log10globalReadMismappingRate) && !Double.isInfinite(log10globalReadMismappingRate)) { - final Allele referenceAllele = alleleVersions.get(haplotypeGraph.getReferenceHaplotype()); - for (final Map.Entry> entry : result.getLikelihoodReadMap().entrySet()) { - final GATKSAMRecord read = entry.getKey(); - final Map likelihoods = entry.getValue(); - final Double maxLogLk = maxAlleleLogLk.get(read); - if (maxAlleleLogLk == null) continue; - final Double referenceLogLk = likelihoods.get(referenceAllele); - final Double minReferenceLogLk = maxLogLk + log10globalReadMismappingRate; - if (referenceLogLk == null || referenceLogLk < minReferenceLogLk) - likelihoods.put(referenceAllele, minReferenceLogLk); - } - } - } - /** * Calculates path costs for a set of reads. *

@@ -479,17 +434,16 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * likelihood (cost) of traversing a possible path across the event block using that read. *

* - * @param reads reads to analyze. - * @param mayNeedAdjustment set where to add reads whose likelihood might need adjustment. + * @param reads reads to analyze. * @return never {@code null}. */ protected Map> calculatePathCostsByRead( - final List reads, final Set mayNeedAdjustment) { + final List reads) { final Map> result = new HashMap<>(reads.size()); if (!hasVariation) return Collections.emptyMap(); for (final GATKSAMRecord r : reads) { - calculatePathCostsByRead(r, mayNeedAdjustment, result); + calculatePathCostsByRead(r, result); } return result; } @@ -498,10 +452,9 @@ public class GraphBasedLikelihoodCalculationEngineInstance { * Calculates path cost for a single read. * * @param read target read. - * @param mayNeedAdjustment set where to add read whose likelihood might need adjustment. * @param result map where to add the result. */ - private void calculatePathCostsByRead(final GATKSAMRecord read, final Set mayNeedAdjustment, + private void calculatePathCostsByRead(final GATKSAMRecord read, final Map> result) { final ReadAnchoring anchoring = new ReadAnchoring(read,haplotypeGraph); @@ -510,14 +463,11 @@ public class GraphBasedLikelihoodCalculationEngineInstance { if (!anchoring.isAnchoredSomewhere()) { defaultToRegularPairHMM(anchoring, result); nonAnchoredReads++; - return; + } else { + calculateReadSegmentCosts(anchoring, hmm, result); + if (!anchoring.isPerfectAnchoring()) danglingEndPathCosts(anchoring, hmm, result); + anchoredReads++; } - - calculateReadSegmentCosts(anchoring, hmm, result); - - if (!anchoring.isPerfectAnchoring()) danglingEndPathCosts(anchoring, hmm, result); - mayNeedAdjustment.add(read); - anchoredReads++; } /** diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCaller.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCaller.java index cfb29d272..1e656facc 100644 --- 
a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCaller.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCaller.java @@ -48,7 +48,9 @@ package org.broadinstitute.gatk.tools.walkers.haplotypecaller; import com.google.java.contract.Ensures; import htsjdk.samtools.SAMFileWriter; -import org.broadinstitute.gatk.utils.commandline.*; +import htsjdk.variant.variantcontext.*; +import htsjdk.variant.variantcontext.writer.VariantContextWriter; +import htsjdk.variant.vcf.*; import org.broadinstitute.gatk.engine.CommandLineGATK; import org.broadinstitute.gatk.engine.arguments.DbsnpArgumentCollection; import org.broadinstitute.gatk.engine.contexts.AlignmentContext; @@ -75,11 +77,12 @@ import org.broadinstitute.gatk.utils.activeregion.ActiveRegion; import org.broadinstitute.gatk.utils.activeregion.ActiveRegionReadState; import org.broadinstitute.gatk.utils.activeregion.ActivityProfileState; import org.broadinstitute.gatk.utils.clipping.ReadClipper; +import org.broadinstitute.gatk.utils.commandline.*; import org.broadinstitute.gatk.utils.exceptions.UserException; import org.broadinstitute.gatk.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.gatk.utils.fragments.FragmentCollection; import org.broadinstitute.gatk.utils.fragments.FragmentUtils; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.gga.GenotypingGivenAllelesUtils; import org.broadinstitute.gatk.utils.gvcf.GVCFWriter; import org.broadinstitute.gatk.utils.haplotype.Haplotype; @@ -89,12 +92,10 @@ import org.broadinstitute.gatk.utils.haplotypeBAMWriter.HaplotypeBAMWriter; import org.broadinstitute.gatk.utils.help.DocumentedGATKFeature; import org.broadinstitute.gatk.utils.help.HelpConstants; import org.broadinstitute.gatk.utils.pairhmm.PairHMM; 
+import org.broadinstitute.gatk.utils.sam.AlignmentUtils; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; import org.broadinstitute.gatk.utils.sam.ReadUtils; import org.broadinstitute.gatk.utils.variant.GATKVCFIndexType; -import htsjdk.variant.variantcontext.*; -import htsjdk.variant.variantcontext.writer.VariantContextWriter; -import htsjdk.variant.vcf.*; import org.broadinstitute.gatk.utils.variant.HomoSapiensConstants; import java.io.FileNotFoundException; @@ -932,12 +933,12 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final Map> reads = splitReadsBySample( regionForGenotyping.getReads() ); // Calculate the likelihoods: CPU intesive part. - final Map stratifiedReadMap = likelihoodCalculationEngine.computeReadLikelihoods(assemblyResult,reads); + final ReadLikelihoods readLikelihoods = + likelihoodCalculationEngine.computeReadLikelihoods(assemblyResult,samplesList,reads); - // Realign all the reads to the most likely haplotype for use by the annotations - for( final Map.Entry entry : stratifiedReadMap.entrySet() ) { - entry.getValue().realignReadsToMostLikelyHaplotype(haplotypes, assemblyResult.getPaddedReferenceLoc()); - } + // Realign reads to their best haplotype. 
+ final Map readRealignments = realignReadsToTheirBestHaplotype(readLikelihoods, assemblyResult.getPaddedReferenceLoc()); + readLikelihoods.changeReads(readRealignments); // Note: we used to subset down at this point to only the "best" haplotypes in all samples for genotyping, but there // was a bad interaction between that selection and the marginalization that happens over each event when computing @@ -947,7 +948,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In final HaplotypeCallerGenotypingEngine.CalledHaplotypes calledHaplotypes = genotypingEngine.assignGenotypeLikelihoods( haplotypes, - stratifiedReadMap, + readLikelihoods, perSampleFilteredReadList, assemblyResult.getFullReferenceWithPadding(), assemblyResult.getPaddedReferenceLoc(), @@ -964,7 +965,7 @@ public class HaplotypeCaller extends ActiveRegionWalker, In assemblyResult.getPaddedReferenceLoc(), haplotypes, calledHaplotypes.getCalledHaplotypes(), - stratifiedReadMap); + readLikelihoods); } if( SCAC.DEBUG ) { logger.info("----------------------------------------------------------------------------------"); } @@ -982,15 +983,36 @@ public class HaplotypeCaller extends ActiveRegionWalker, In // output variant containing region. result.addAll(referenceConfidenceModel.calculateRefConfidence(assemblyResult.getReferenceHaplotype(), calledHaplotypes.getCalledHaplotypes(), assemblyResult.getPaddedReferenceLoc(), regionForGenotyping, - stratifiedReadMap, calledHaplotypes.getCalls())); + readLikelihoods, calledHaplotypes.getCalls())); // output right-flanking non-variant section: if (trimmingResult.hasRightFlankingRegion()) result.addAll(referenceModelForNoVariation(trimmingResult.nonVariantRightFlankRegion(),false)); return result; } - } else { + } else return calledHaplotypes.getCalls(); + } + + /** + * Returns a map with the original read as a key and the realigned read as the value. + *

+ * Missing keys or equivalent key and value pairs mean that the read was not realigned. + *

+ * @return never {@code null} + */ + private Map realignReadsToTheirBestHaplotype(final ReadLikelihoods originalReadLikelihoods, final GenomeLoc paddedReferenceLoc) { + + final Collection.BestAllele> bestAlleles = originalReadLikelihoods.bestAlleles(); + final Map result = new HashMap<>(bestAlleles.size()); + + for (final ReadLikelihoods.BestAllele bestAllele : bestAlleles) { + final GATKSAMRecord originalRead = bestAllele.read; + final Haplotype bestHaplotype = bestAllele.allele; + final boolean isInformative = bestAllele.isInformative(); + final GATKSAMRecord realignedRead = AlignmentUtils.createReadAlignedToRef(originalRead,bestHaplotype,paddedReferenceLoc.getStart(),isInformative); + result.put(originalRead,realignedRead); } + return result; } private boolean containsCalls(final HaplotypeCallerGenotypingEngine.CalledHaplotypes calledHaplotypes) { @@ -1086,23 +1108,15 @@ public class HaplotypeCaller extends ActiveRegionWalker, In * @param region the active region containing reads * @return a map from sample -> PerReadAlleleLikelihoodMap that maps each read to ref */ - public static Map createDummyStratifiedReadMap(final Haplotype refHaplotype, - final List samples, - final ActiveRegion region) { - final Allele refAllele = Allele.create(refHaplotype, true); + public static ReadLikelihoods createDummyStratifiedReadMap(final Haplotype refHaplotype, + final List samples, + final ActiveRegion region) { + return new ReadLikelihoods<>(samples, Collections.singletonList(refHaplotype), + splitReadsBySample(samples, region.getReads())); - final Map map = new LinkedHashMap<>(1); - for ( final Map.Entry> entry : splitReadsBySample(samples, region.getReads()).entrySet() ) { - final PerReadAlleleLikelihoodMap likelihoodMap = new PerReadAlleleLikelihoodMap(); - for ( final GATKSAMRecord read : entry.getValue() ) { - likelihoodMap.add(read, refAllele, 0.0); - } - map.put(entry.getKey(), likelihoodMap); - } - - return map; } + 
//--------------------------------------------------------------------------------------------------------------- // // reduce diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCallerGenotypingEngine.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCallerGenotypingEngine.java index 90dda170e..32f54436c 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCallerGenotypingEngine.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/HaplotypeCallerGenotypingEngine.java @@ -57,8 +57,7 @@ import org.broadinstitute.gatk.tools.walkers.genotyper.OutputMode; import org.broadinstitute.gatk.utils.GenomeLoc; import org.broadinstitute.gatk.utils.GenomeLocParser; import org.broadinstitute.gatk.utils.Utils; -import org.broadinstitute.gatk.utils.collections.DefaultHashMap; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.EventMap; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.haplotype.MergeVariantsAcrossHaplotypes; @@ -74,7 +73,7 @@ import java.util.*; public class HaplotypeCallerGenotypingEngine extends GenotypingEngine { private final static List NO_CALL = Collections.singletonList(Allele.NO_CALL); - private final static int ALLELE_EXTENSION = 2; + private static final int ALLELE_EXTENSION = 2; private MergeVariantsAcrossHaplotypes crossHaplotypeEventMerger; @@ -161,17 +160,17 @@ public class HaplotypeCallerGenotypingEngine extends GenotypingEngine(haplotypes,likelihoods) + * @param readLikelihoods Map from reads->(haplotypes,likelihoods) * @param perSampleFilteredReadList Map from sample to reads that were filtered after assembly and 
before calculating per-read likelihoods. * @param ref Reference bytes at active region * @param refLoc Corresponding active region genome location * @param activeRegionWindow Active window * @param genomeLocParser GenomeLocParser * @param activeAllelesToGenotype Alleles to genotype - * @param emitReferenceConfidence whether we should add a alternative allele to the result variation contexts. + * @param emitReferenceConfidence whether we should add a <NON_REF> alternative allele to the result variation contexts. * * @return A CalledHaplotypes object containing a list of VC's with genotyped events and called haplotypes * @@ -180,7 +179,7 @@ public class HaplotypeCallerGenotypingEngine extends GenotypingEngine haplotypes, - final Map haplotypeReadMap, + final ReadLikelihoods readLikelihoods, final Map> perSampleFilteredReadList, final byte[] ref, final GenomeLoc refLoc, @@ -191,7 +190,7 @@ public class HaplotypeCallerGenotypingEngine extends GenotypingEngine startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, haplotypeReadMap, ref, refLoc, activeAllelesToGenotype); + final TreeSet startPosKeySet = decomposeHaplotypesIntoVariantContexts(haplotypes, readLikelihoods, ref, refLoc, activeAllelesToGenotype); // Walk along each position in the key set and create each event to be outputted final Set calledHaplotypes = new HashSet<>(); final List returnCalls = new ArrayList<>(); - final Map emptyDownSamplingMap = new DefaultHashMap<>(0.0); for( final int loc : startPosKeySet ) { if( loc >= activeRegionWindow.getStart() && loc <= activeRegionWindow.getStop() ) { // genotyping an event inside this active region @@ -221,7 +219,9 @@ public class HaplotypeCallerGenotypingEngine extends GenotypingEngine alleleReadMap, final VariantContext mergedVC ) { - final GenotypesContext genotypes = GenotypesContext.create(alleleReadMap.size()); + private GenotypesContext calculateGLsForThisEvent( final ReadLikelihoods readLikelihoods, final VariantContext mergedVC ) { + final 
GenotypesContext genotypes = GenotypesContext.create(readLikelihoods.sampleCount()); // Grab the genotype likelihoods from the appropriate places in the haplotype likelihood matrix -- calculation performed independently per sample - for( final String sample : alleleReadMap.keySet() ) { + for (final String sample : readLikelihoods.samples() ) { final int numHaplotypes = mergedVC.getAlleles().size(); final double[] genotypeLikelihoods = new double[numHaplotypes * (numHaplotypes+1) / 2]; - final double[][] haplotypeLikelihoodMatrix = PairHMMLikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, alleleReadMap, mergedVC.getAlleles(), true); + final double[][] haplotypeLikelihoodMatrix = PairHMMLikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, readLikelihoods, mergedVC.getAlleles(), true); int glIndex = 0; for( int iii = 0; iii < numHaplotypes; iii++ ) { for( int jjj = 0; jjj <= iii; jjj++ ) { genotypeLikelihoods[glIndex++] = haplotypeLikelihoodMatrix[iii][jjj]; // for example: AA,AB,BB,AC,BC,CC } } + logger.debug(" Likelihoods for sample " + sample + " : " + Arrays.toString(genotypeLikelihoods)); genotypes.add(new GenotypeBuilder(sample).alleles(NO_CALL).PL(genotypeLikelihoods).make()); } return genotypes; } - private static Map addFilteredReadList(final GenomeLocParser parser, - final Map perSampleReadMap, - final Map> perSampleFilteredReadList, - final VariantContext call, - final boolean requireOverlap) { - - final Map returnMap = new LinkedHashMap<>(); - final GenomeLoc callLoc = ( requireOverlap ? 
parser.createGenomeLoc(call) : null ); - for( final Map.Entry sample : perSampleReadMap.entrySet() ) { - final PerReadAlleleLikelihoodMap likelihoodMap = new PerReadAlleleLikelihoodMap(); - - for( final Map.Entry> mapEntry : sample.getValue().getLikelihoodReadMap().entrySet() ) { - // only count the read if it overlaps the event, otherwise it is not added to the output read list at all - if( !requireOverlap || callLoc.overlapsP(parser.createGenomeLocUnclipped(mapEntry.getKey())) ) { - for( final Map.Entry alleleDoubleEntry : mapEntry.getValue().entrySet() ) { - likelihoodMap.add(mapEntry.getKey(), alleleDoubleEntry.getKey(), alleleDoubleEntry.getValue()); - } - } - } - - // add all filtered reads to the NO_CALL list because they weren't given any likelihoods - for( final GATKSAMRecord read : perSampleFilteredReadList.get(sample.getKey()) ) { - // only count the read if it overlaps the event, otherwise it is not added to the output read list at all - if( !requireOverlap || callLoc.overlapsP(parser.createGenomeLocUnclipped(read)) ) { - for( final Allele allele : call.getAlleles() ) { - likelihoodMap.add(read, allele, 0.0); - } - } - } - - returnMap.put(sample.getKey(), likelihoodMap); - } - return returnMap; - } - /** * Removes symbolic events from list of haplotypes * @param haplotypes Input/output list of haplotypes, before/after removal @@ -490,48 +486,6 @@ public class HaplotypeCallerGenotypingEngine extends GenotypingEngine (haplotypes, likelihoods) - * @param alleleMapper Map from alleles -> list of haplotypes which support that allele - * @param perSampleDownsamplingFraction Map from samples -> downsampling fraction - * @param genomeLocParser a genome loc parser - * @param eventsToGenotype the alleles to genotype in a single VariantContext, will be null if we don't want to require overlap - * @return Map from reads -> (alleles, likelihoods) - */ - protected Map convertHaplotypeReadMapToAlleleReadMap( final Map haplotypeReadMap, - final Map> alleleMapper, - 
final Map perSampleDownsamplingFraction, - final GenomeLocParser genomeLocParser, - final VariantContext eventsToGenotype) { - final GenomeLoc callLoc = ( eventsToGenotype != null ? genomeLocParser.createGenomeLoc(eventsToGenotype) : null ); - - final Map alleleReadMap = new LinkedHashMap<>(); - for( final Map.Entry haplotypeReadMapEntry : haplotypeReadMap.entrySet() ) { // for each sample - final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); - for( final Map.Entry> alleleMapperEntry : alleleMapper.entrySet() ) { // for each output allele - final List mappedHaplotypes = alleleMapperEntry.getValue(); - for( final Map.Entry> readEntry : haplotypeReadMapEntry.getValue().getLikelihoodReadMap().entrySet() ) { // for each read - if( eventsToGenotype == null || callLoc.overlapsP(genomeLocParser.createPaddedGenomeLoc(genomeLocParser.createGenomeLocUnclipped(readEntry.getKey()), ALLELE_EXTENSION)) ) { // make sure the read overlaps - double maxLikelihood = Double.NEGATIVE_INFINITY; - for( final Map.Entry alleleDoubleEntry : readEntry.getValue().entrySet() ) { // for each input allele - if( mappedHaplotypes.contains( new Haplotype(alleleDoubleEntry.getKey())) ) { // exact match of haplotype base string - maxLikelihood = Math.max( maxLikelihood, alleleDoubleEntry.getValue() ); - } - } - perReadAlleleLikelihoodMap.add(readEntry.getKey(), alleleMapperEntry.getKey(), maxLikelihood); - } - } - } - perReadAlleleLikelihoodMap.performPerAlleleDownsampling(perSampleDownsamplingFraction.get(haplotypeReadMapEntry.getKey())); // perform contamination downsampling - alleleReadMap.put(haplotypeReadMapEntry.getKey(), perReadAlleleLikelihoodMap); - } - - return alleleReadMap; - } - protected static Map> createAlleleMapper( final Map mergeMap, final Map> eventMap ) { final Map> alleleMapper = new LinkedHashMap<>(); for( final Map.Entry entry : mergeMap.entrySet() ) { diff --git 
a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/PairHMMLikelihoodCalculationEngine.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/PairHMMLikelihoodCalculationEngine.java index 260794497..e662806fe 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/PairHMMLikelihoodCalculationEngine.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/PairHMMLikelihoodCalculationEngine.java @@ -53,7 +53,7 @@ import org.apache.log4j.Logger; import org.broadinstitute.gatk.utils.MathUtils; import org.broadinstitute.gatk.utils.QualityUtils; import org.broadinstitute.gatk.utils.exceptions.UserException; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.pairhmm.*; import org.broadinstitute.gatk.utils.recalibration.covariates.RepeatCovariate; @@ -73,6 +73,7 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula public static final byte BASE_QUALITY_SCORE_THRESHOLD = (byte) 18; // Base quals less than this value are squashed down to min possible qual private final byte constantGCP; + private final double log10globalReadMismappingRate; private final PairHMM.HMM_IMPLEMENTATION hmmType; @@ -177,40 +178,6 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula pairHMMThreadLocal.get().close(); } - - private void writeDebugLikelihoods(final GATKSAMRecord processedRead, final Haplotype haplotype, final double log10l){ - if ( WRITE_LIKELIHOODS_TO_FILE ) { - likelihoodsStream.printf("%s %s %s %s %s %s %f%n", - haplotype.getBaseString(), - new String(processedRead.getReadBases() ), - 
SAMUtils.phredToFastq(processedRead.getBaseQualities() ), - SAMUtils.phredToFastq(processedRead.getBaseInsertionQualities() ), - SAMUtils.phredToFastq(processedRead.getBaseDeletionQualities() ), - SAMUtils.phredToFastq(constantGCP), - log10l); - } - } - - private Map createAlleleMap(List haplotypes){ - final int numHaplotypes = haplotypes.size(); - final Map alleleMap = new LinkedHashMap<>(numHaplotypes); - for ( final Haplotype haplotype : haplotypes ) { - final Allele allele = Allele.create(haplotype, true); - alleleMap.put(allele, haplotype); - } - return alleleMap; - } - - private Map fillGCPArrays(List reads){ - final Map GCPArrayMap = new LinkedHashMap<>(); - for (GATKSAMRecord read: reads){ - byte [] GCPArray = new byte[read.getReadBases().length]; - Arrays.fill( GCPArray, constantGCP ); // Is there a way to derive empirical estimates for this from the data? - GCPArrayMap.put(read, GCPArray); - } - return GCPArrayMap; - } - private void capMinimumReadQualities(GATKSAMRecord read, byte[] readQuals, byte[] readInsQuals, byte[] readDelQuals) { for( int kkk = 0; kkk < readQuals.length; kkk++ ) { readQuals[kkk] = (byte) Math.min( 0xff & readQuals[kkk], read.getMappingQuality()); // cap base quality by mapping quality, as in UG @@ -229,9 +196,9 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula * @return processedReads. 
A new list of reads, in the same order, whose qualities have been altered by PCR error model and minimal quality thresholding */ private List modifyReadQualities(final List reads) { - List processedReads = new LinkedList<>(); - for ( GATKSAMRecord read : reads ) { + final List result = new ArrayList<>(reads.size()); + for (final GATKSAMRecord read : reads) { final byte[] readBases = read.getReadBases(); // NOTE -- must clone anything that gets modified here so we don't screw up future uses of the read @@ -244,71 +211,9 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula // Create a new copy of the read and sets its base qualities to the modified versions. // Pack this into a new list for return - final GATKSAMRecord processedRead = GATKSAMRecord.createQualityModifiedRead(read, readBases, readQuals, readInsQuals, readDelQuals); - processedReads.add(processedRead); + result.add(GATKSAMRecord.createQualityModifiedRead(read, readBases, readQuals, readInsQuals, readDelQuals)); } - return processedReads; - } - - /** - * Post-processing of the read/allele likelihoods. - * - * We send quality-capped reads to the pairHMM for evaluation, and it returns a map containing these capped reads. - * We wish to return a map containing the original, unmodified reads. - * - * At the same time, we want to effectively set a lower cap on the reference score, based on the global mis-mapping rate. - * This protects us from the case where the assembly has produced haplotypes - * that are very divergent from reference, but are supported by only one read. In effect - * we capping how badly scoring the reference can be for any read by the chance that the read - * itself just doesn't belong here - * - * @param perReadAlleleLikelihoodMap the original map returned by the PairHMM. 
Contains the processed reads, the haplotype Alleles, and their log10ls - * @param reads Our original, unmodified reads - * @param processedReads Reads whose minimum base,insertion,deletion qualities have been capped; these were actually used to derive log10ls - * @param alleleHaplotypeMap The map associating the Allele and Haplotype versions of each haplotype - * - * @return processedReadAlleleLikelihoodMap; a new PRALM containing the original reads, and their haplotype log10ls including capped reference log10ls - */ - private PerReadAlleleLikelihoodMap capReferenceHaplotypeLikelihoods(PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap, List reads, List processedReads, Map alleleHaplotypeMap){ - - // a new read/allele map, to contain the uncapped reads, haplotypes, and potentially the capped reference log10ls - final PerReadAlleleLikelihoodMap processedReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap(); - - final int numReads = reads.size(); - for (int readIndex = 0; readIndex < numReads; readIndex++) { - - // Get the original and quality-modified read from their respective lists - // Note that this requires both lists to have reads in the same order - final GATKSAMRecord originalRead = reads.get(readIndex); - final GATKSAMRecord processedRead = processedReads.get(readIndex); - - double bestNonReflog10L = Double.NEGATIVE_INFINITY; - - for ( final Allele allele : alleleHaplotypeMap.keySet() ) { - final double log10l = perReadAlleleLikelihoodMap.getLikelihoodAssociatedWithReadAndAllele(processedRead, allele); - final Haplotype haplotype = alleleHaplotypeMap.get(allele); - if ( haplotype.isNonReference() ) - bestNonReflog10L = Math.max(bestNonReflog10L, log10l); - writeDebugLikelihoods(processedRead, haplotype, log10l); - - // add the ORIGINAL (non-capped) read to the final map, along with the current haplotype and associated log10l - processedReadAlleleLikelihoodMap.add(originalRead, allele, log10l); - } - - // ensure that any haplotype is no worse than 
the best non-ref haplotype minus the global - // mismapping rate. This protects us from the case where the assembly has produced haplotypes - // that are very divergent from reference, but are supported by only one read. In effect - // we capping how badly scoring any haplotype can be for any read by the chance that the read - // itself just doesn't belong here - final double worstAllowedLog10l = bestNonReflog10L + log10globalReadMismappingRate; - for ( final Allele allele : alleleHaplotypeMap.keySet() ) { - final double log10l = perReadAlleleLikelihoodMap.getLikelihoodAssociatedWithReadAndAllele(processedRead, allele); - if( log10l < worstAllowedLog10l ) { - processedReadAlleleLikelihoodMap.add(originalRead, allele, worstAllowedLog10l); - } - } - } - return processedReadAlleleLikelihoodMap; + return result; } /** @@ -343,85 +248,109 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula pairHMMThreadLocal.get().finalizeRegion(); } - @Override - public Map computeReadLikelihoods( final AssemblyResultSet assemblyResultSet, final Map> perSampleReadList ) { + public ReadLikelihoods computeReadLikelihoods( final AssemblyResultSet assemblyResultSet, final List samples, final Map> perSampleReadList ) { final List haplotypes = assemblyResultSet.getHaplotypeList(); + // configure the HMM initializePairHMM(haplotypes, perSampleReadList); - // Add likelihoods for each sample's reads to our stratifiedReadMap - final Map stratifiedReadMap = new LinkedHashMap<>(); - for( final Map.Entry> sampleEntry : perSampleReadList.entrySet() ) { - // evaluate the likelihood of the reads given those haplotypes - final PerReadAlleleLikelihoodMap map = computeReadLikelihoods(haplotypes, sampleEntry.getValue()); - - map.filterPoorlyModelledReads(EXPECTED_ERROR_RATE_PER_BASE); - stratifiedReadMap.put(sampleEntry.getKey(), map); + // Add likelihoods for each sample's reads to our result + final ReadLikelihoods result = new ReadLikelihoods<>(samples, haplotypes, 
perSampleReadList); + final int sampleCount = result.sampleCount(); + for (int s = 0; s < sampleCount; s++) { + final ReadLikelihoods.Matrix sampleLikelihoods = result.sampleMatrix(s); + computeReadLikelihoods(sampleLikelihoods); } - //Used mostly by the JNI implementation(s) to free arrays - finalizePairHMM(); - return stratifiedReadMap; + result.normalizeLikelihoods(false, log10globalReadMismappingRate); + result.filterPoorlyModeledReads(EXPECTED_ERROR_RATE_PER_BASE); + finalizePairHMM(); + return result; } - - private PerReadAlleleLikelihoodMap computeReadLikelihoods( final List haplotypes, final List reads) { + private void computeReadLikelihoods( final ReadLikelihoods.Matrix likelihoods) { // Modify the read qualities by applying the PCR error model and capping the minimum base,insertion,deletion qualities - List processedReads = modifyReadQualities(reads); - - // Get alleles corresponding to our haplotypees - Map alleleHaplotypeMap = createAlleleMap(haplotypes); - - // Get an array containing the constantGCP for each read in our modified read list - Map GCPArrayMap = fillGCPArrays(processedReads); + final List processedReads = modifyReadQualities(likelihoods.reads()); // Run the PairHMM to calculate the log10 likelihood of each (processed) reads' arising from each haplotype - PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = pairHMMThreadLocal.get().computeLikelihoods(processedReads, alleleHaplotypeMap, GCPArrayMap); + pairHMMThreadLocal.get().computeLikelihoods(likelihoods,processedReads,constantGCP); - // Generate a new map containing the original, unmodified reads, and with minimal reference haplotype log10ls determined from the global mis-mapping rate + if (WRITE_LIKELIHOODS_TO_FILE) + writeDebugLikelihoods(likelihoods); + } - return capReferenceHaplotypeLikelihoods(perReadAlleleLikelihoodMap, reads, processedReads, alleleHaplotypeMap); + private void writeDebugLikelihoods(final ReadLikelihoods.Matrix likelihoods) { + final List reads = 
likelihoods.reads(); + final List haplotypes = likelihoods.alleles(); + final int haplotypeCount = haplotypes.size(); + final int readCount = reads.size(); + for (int r = 0; r < readCount; r++) + for (int a = 0; a < haplotypeCount; a++) + writeDebugLikelihoods(reads.get(r),haplotypes.get(a),likelihoods.get(a,r)); + likelihoodsStream.flush(); + } + + private void writeDebugLikelihoods(final GATKSAMRecord processedRead, final Haplotype haplotype, final double log10l){ + likelihoodsStream.printf("%s %s %s %s %s %s %f%n", + haplotype.getBaseString(), + new String(processedRead.getReadBases() ), + SAMUtils.phredToFastq(processedRead.getBaseQualities()), + SAMUtils.phredToFastq(processedRead.getBaseInsertionQualities() ), + SAMUtils.phredToFastq(processedRead.getBaseDeletionQualities() ), + SAMUtils.phredToFastq(constantGCP), + log10l); } @Requires({"alleleOrdering.size() > 0"}) @Ensures({"result.length == result[0].length", "result.length == alleleOrdering.size()"}) + @Deprecated public static double[][] computeDiploidHaplotypeLikelihoods( final String sample, - final Map stratifiedReadMap, - final List alleleOrdering, + final ReadLikelihoods readLikelihoods, + final List alleleOrdering, final boolean normalize ) { - return computeDiploidHaplotypeLikelihoods(Collections.singleton(sample), stratifiedReadMap, alleleOrdering, normalize); + return computeDiploidHaplotypeLikelihoods(Collections.singleton(sample), readLikelihoods, alleleOrdering, normalize); } @Requires({"alleleOrdering.size() > 0"}) @Ensures({"result.length == result[0].length", "result.length == alleleOrdering.size()"}) - public static double[][] computeDiploidHaplotypeLikelihoods( final Set samples, - final Map stratifiedReadMap, - final List alleleOrdering, + @Deprecated + private static double[][] computeDiploidHaplotypeLikelihoods( final Set samples, + final ReadLikelihoods readLikelihoods, + final List alleleOrdering, final boolean normalize) { final int numHaplotypes = alleleOrdering.size(); + final 
int[] alleleIndices = new int[alleleOrdering.size()]; + final ListIterator alleleIterator = alleleOrdering.listIterator(); + int nextAlleleIndex = 0; + while (alleleIterator.hasNext()) + if ((alleleIndices[nextAlleleIndex++] = readLikelihoods.alleleIndex((Allele) alleleIterator.next())) == -1) + throw new IllegalArgumentException("allele " + alleleIterator.previous() + " not found in likelihood collection "); + final double[][] haplotypeLikelihoodMatrix = new double[numHaplotypes][numHaplotypes]; - for( int iii = 0; iii < numHaplotypes; iii++ ) { - Arrays.fill(haplotypeLikelihoodMatrix[iii], Double.NEGATIVE_INFINITY); - } // compute the diploid haplotype likelihoods - for( int iii = 0; iii < numHaplotypes; iii++ ) { - final Allele iii_allele = alleleOrdering.get(iii); - for( int jjj = 0; jjj <= iii; jjj++ ) { - final Allele jjj_allele = alleleOrdering.get(jjj); - double haplotypeLikelihood = 0.0; - for( final String sample : samples ) { - for( final Map.Entry> entry : stratifiedReadMap.get(sample).getLikelihoodReadMap().entrySet() ) { - // Compute log10(10^x1/2 + 10^x2/2) = log10(10^x1+10^x2)-log10(2) - // First term is approximated by Jacobian log with table lookup. 
- haplotypeLikelihood += ( MathUtils.approximateLog10SumLog10(entry.getValue().get(iii_allele), entry.getValue().get(jjj_allele)) + MathUtils.LOG_ONE_HALF ); + for(final String sample : samples) { + final int sampleIndex = readLikelihoods.sampleIndex(sample); + if (sampleIndex == -1) + throw new IllegalArgumentException("the sample provided is not in the likelihood collection"); + final ReadLikelihoods.Matrix sampleLikelihoods = readLikelihoods.sampleMatrix(sampleIndex); + final int sampleReadCount = readLikelihoods.sampleReadCount(sampleIndex); + for( int iii = 0; iii < numHaplotypes; iii++ ) { + final int iii_allele = alleleIndices[iii]; + for( int jjj = 0; jjj <= iii; jjj++ ) { + final int jjj_allele = alleleIndices[jjj]; + double haplotypeLikelihood = 0.0; + for (int r = 0; r < sampleReadCount; r++) { + final double value = MathUtils.approximateLog10SumLog10(sampleLikelihoods.get(iii_allele,r), + sampleLikelihoods.get(jjj_allele,r)) + MathUtils.LOG_ONE_HALF; + haplotypeLikelihood += value; } + haplotypeLikelihoodMatrix[iii][jjj] += haplotypeLikelihood; } - haplotypeLikelihoodMatrix[iii][jjj] = haplotypeLikelihood; } } @@ -431,6 +360,7 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula @Requires({"likelihoodMatrix.length == likelihoodMatrix[0].length"}) @Ensures({"result.length == result[0].length", "result.length == likelihoodMatrix.length"}) + @Deprecated protected static double[][] normalizeDiploidLikelihoodMatrixFromLog10( final double[][] likelihoodMatrix ) { final int numHaplotypes = likelihoodMatrix.length; double[] genotypeLikelihoods = new double[numHaplotypes*(numHaplotypes+1)/2]; @@ -450,131 +380,6 @@ public class PairHMMLikelihoodCalculationEngine implements ReadLikelihoodCalcula return likelihoodMatrix; } - // -------------------------------------------------------------------------------- - // - // System to compute the best N haplotypes for genotyping - // - // 
-------------------------------------------------------------------------------- -// -// /** -// * Helper function for selectBestHaplotypesFromEachSample that updates the score of haplotype haplotypeAsAllele -// * @param map an annoying map object that moves us between the allele and haplotype representation -// * @param haplotypeAsAllele the allele version of the haplotype -// * @return the haplotype version, with its score incremented by 1 if its non-reference -// */ -// private Haplotype updateSelectHaplotype(final Map map, final Allele haplotypeAsAllele) { -// final Haplotype h = map.get(haplotypeAsAllele); // TODO -- fixme when haplotypes are properly generic -// if ( h.isNonReference() ) h.setScore(h.getScore() + 1); // ref is already at max value -// return h; -// } -// -// /** -// * Take the best N haplotypes and return them as a list -// * -// * Only considers the haplotypes selectedHaplotypes that were actually selected by at least one sample -// * as it's preferred haplotype. Takes the best N haplotypes from selectedHaplotypes in decreasing -// * order of score (so higher score haplotypes are preferred). 
The N we take is determined by -// * -// * N = min(2 * nSamples + 1, maxNumHaplotypesInPopulation) -// * -// * where 2 * nSamples is the number of chromosomes in 2 samples including the reference, and our workload is -// * bounded by maxNumHaplotypesInPopulation as that number can grow without bound -// * -// * @param selectedHaplotypes a non-null set of haplotypes with scores >= 1 -// * @param nSamples the number of samples used to select the haplotypes -// * @param maxNumHaplotypesInPopulation the maximum number of haplotypes we're allowed to take, regardless of nSamples -// * @return a list of N or fewer haplotypes, with the reference haplotype first -// */ -// private List selectBestHaplotypesAccordingToScore(final Set selectedHaplotypes, final int nSamples, final int maxNumHaplotypesInPopulation) { -// final List selectedHaplotypesList = new ArrayList<>(selectedHaplotypes); -// Collections.sort(selectedHaplotypesList, new HaplotypeScoreComparator()); -// final int numChromosomesInSamplesPlusRef = 2 * nSamples + 1; -// final int haplotypesToKeep = Math.min(numChromosomesInSamplesPlusRef, maxNumHaplotypesInPopulation); -// final List bestHaplotypes = selectedHaplotypesList.size() <= haplotypesToKeep ? selectedHaplotypesList : selectedHaplotypesList.subList(0, haplotypesToKeep); -// if ( bestHaplotypes.get(0).isNonReference()) throw new IllegalStateException("BUG: reference haplotype should be first in list"); -// return bestHaplotypes; -// } -// -// /** -// * Select the best haplotypes for genotyping the samples in stratifiedReadMap -// * -// * Selects these haplotypes by counting up how often each haplotype is selected as one of the most likely -// * haplotypes per sample. What this means is that each sample computes the diploid genotype likelihoods for -// * all possible pairs of haplotypes, and the pair with the highest likelihood has each haplotype each get -// * one extra count for each haplotype (so hom-var haplotypes get two counts). 
After performing this calculation -// * the best N haplotypes are selected (@see #selectBestHaplotypesAccordingToScore) and a list of the -// * haplotypes in order of score are returned, ensuring that at least one of the haplotypes is reference. -// * -// * @param haplotypes a list of all haplotypes we're considering -// * @param stratifiedReadMap a map from sample -> read likelihoods per haplotype -// * @param maxNumHaplotypesInPopulation the max. number of haplotypes we can select from haplotypes -// * @return a list of selected haplotypes with size <= maxNumHaplotypesInPopulation -// */ -// public List selectBestHaplotypesFromEachSample(final List haplotypes, final Map stratifiedReadMap, final int maxNumHaplotypesInPopulation) { -// if ( haplotypes.size() < 2 ) throw new IllegalArgumentException("Must have at least 2 haplotypes to consider but only have " + haplotypes); -// -// if ( haplotypes.size() == 2 ) return haplotypes; // fast path -- we'll always want to use 2 haplotypes -// -// // all of the haplotypes that at least one sample called as one of the most likely -// final Set selectedHaplotypes = new HashSet<>(); -// selectedHaplotypes.add(findReferenceHaplotype(haplotypes)); // ref is always one of the selected -// -// // our annoying map from allele -> haplotype -// final Map allele2Haplotype = new HashMap<>(); -// for ( final Haplotype h : haplotypes ) { -// h.setScore(h.isReference() ? 
Double.MAX_VALUE : 0.0); // set all of the scores to 0 (lowest value) for all non-ref haplotypes -// allele2Haplotype.put(Allele.create(h, h.isReference()), h); -// } -// -// // for each sample, compute the most likely pair of haplotypes -// for ( final Map.Entry entry : stratifiedReadMap.entrySet() ) { -// // get the two most likely haplotypes under a diploid model for this sample -// final MostLikelyAllele mla = entry.getValue().getMostLikelyDiploidAlleles(); -// -// if ( mla != null ) { // there was something to evaluate in this sample -// // note that there must be at least 2 haplotypes -// final Haplotype best = updateSelectHaplotype(allele2Haplotype, mla.getMostLikelyAllele()); -// final Haplotype second = updateSelectHaplotype(allele2Haplotype, mla.getSecondMostLikelyAllele()); -// -//// if ( DEBUG ) { -//// logger.info("Chose haplotypes " + best + " " + best.getCigar() + " and " + second + " " + second.getCigar() + " for sample " + entry.getKey()); -//// } -// -// // add these two haplotypes to the set of haplotypes that have been selected -// selectedHaplotypes.add(best); -// selectedHaplotypes.add(second); -// -// // we've already selected all of our haplotypes, and we don't need to prune them down -// if ( selectedHaplotypes.size() == haplotypes.size() && haplotypes.size() < maxNumHaplotypesInPopulation ) -// break; -// } -// } -// -// // take the best N haplotypes forward, in order of the number of samples that choose them -// final int nSamples = stratifiedReadMap.size(); -// final List bestHaplotypes = selectBestHaplotypesAccordingToScore(selectedHaplotypes, nSamples, maxNumHaplotypesInPopulation); -// -// if ( DEBUG ) { -// logger.info("Chose " + (bestHaplotypes.size() - 1) + " alternate haplotypes to genotype in all samples."); -// for ( final Haplotype h : bestHaplotypes ) { -// logger.info("\tHaplotype " + h.getCigar() + " selected for further genotyping" + (h.isNonReference() ? 
" found " + (int)h.getScore() + " haplotypes" : " as ref haplotype")); -// } -// } -// return bestHaplotypes; -// } -// -// /** -// * Find the haplotype that isRef(), or @throw ReviewedGATKException if one isn't found -// * @param haplotypes non-null list of haplotypes -// * @return the reference haplotype -// */ -// private static Haplotype findReferenceHaplotype( final List haplotypes ) { -// for( final Haplotype h : haplotypes ) { -// if( h.isReference() ) return h; -// } -// throw new ReviewedGATKException( "No reference haplotype found in the list of haplotypes!" ); -// } - // -------------------------------------------------------------------------------- // // Experimental attempts at PCR error rate modeling diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/RandomLikelihoodCalculationEngine.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/RandomLikelihoodCalculationEngine.java index 7edcfde39..bd72a764d 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/RandomLikelihoodCalculationEngine.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/RandomLikelihoodCalculationEngine.java @@ -1,52 +1,53 @@ /* * By downloading the PROGRAM you agree to the following terms of use: -* +* * BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY -* +* * This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE). 
-* +* * WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and * WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions. * NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows: -* +* * 1. DEFINITIONS * 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute/GATK on the EFFECTIVE DATE. -* +* * 2. LICENSE -* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. +* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM. * The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement. * 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. 
LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement. -* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. -* -* 3. OWNERSHIP OF INTELLECTUAL PROPERTY +* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM. +* +* 3. OWNERSHIP OF INTELLECTUAL PROPERTY * LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. 
LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication. * Copyright 2012 Broad Institute, Inc. * Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc. * LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as states above for attribution purposes. -* +* * 4. INDEMNIFICATION * LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement. -* +* * 5. NO REPRESENTATIONS OR WARRANTIES * THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME. 
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING. -* +* * 6. ASSIGNMENT * This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void. -* +* * 7. MISCELLANEOUS * 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries. * 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes. * 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4. -* 7.4 Notice. 
Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. -* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. +* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt. +* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter. 
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement. * 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles. */ + package org.broadinstitute.gatk.tools.walkers.haplotypecaller; import org.broadinstitute.gatk.engine.GenomeAnalysisEngine; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; import htsjdk.variant.variantcontext.Allele; @@ -62,21 +63,25 @@ import java.util.Random; public class RandomLikelihoodCalculationEngine implements ReadLikelihoodCalculationEngine { @Override - public Map computeReadLikelihoods(final AssemblyResultSet assemblyResultSet, final Map> reads) { + public ReadLikelihoods computeReadLikelihoods(final AssemblyResultSet assemblyResultSet, + final List samples, + final Map> reads) { final List haplotypes = assemblyResultSet.getHaplotypeList(); - final Map result = new HashMap<>(reads.size()); + final ReadLikelihoods result = new ReadLikelihoods(samples, haplotypes, reads); final Map alleles = new HashMap<>(haplotypes.size()); for (final Haplotype haplotype : haplotypes) alleles.put(haplotype,Allele.create(haplotype,false)); final Random rnd = GenomeAnalysisEngine.getRandomGenerator(); - for (final String sample : reads.keySet()) { - final PerReadAlleleLikelihoodMap pralm = new PerReadAlleleLikelihoodMap(); - for (final GATKSAMRecord read : reads.get(sample)) - for (final Haplotype haplotype : haplotypes ) - 
pralm.add(read,alleles.get(haplotype),-Math.abs(rnd.nextDouble())); - result.put(sample,pralm); + final int sampleCount = samples.size(); + final int alleleCount = alleles.size(); + for (int i = 0; i < sampleCount; i++) { + final List sampleReads = result.sampleReads(i); + final int readCount = sampleReads.size(); + final ReadLikelihoods.Matrix sampleLikelihoods = result.sampleMatrix(i); + for (int a = 0; a < alleleCount; a++) + for (int r = 0; r < readCount; r++) + sampleLikelihoods.set(a,r,-Math.abs(rnd.nextDouble())); } - return result; } diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReadLikelihoodCalculationEngine.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReadLikelihoodCalculationEngine.java index 535ca813f..6119cba1c 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReadLikelihoodCalculationEngine.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReadLikelihoodCalculationEngine.java @@ -46,7 +46,8 @@ package org.broadinstitute.gatk.tools.walkers.haplotypecaller; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; +import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; import java.util.List; @@ -87,7 +88,7 @@ public interface ReadLikelihoodCalculationEngine { * @return never {@code null}, and with at least one entry for input sample (keys in {@code perSampleReadList}. * The value maps can be potentially empty though. 
*/ - public Map computeReadLikelihoods(AssemblyResultSet assemblyResultSet, + public ReadLikelihoods computeReadLikelihoods(AssemblyResultSet assemblyResultSet, List samples, Map> perSampleReadList); public void close(); diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReferenceConfidenceModel.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReferenceConfidenceModel.java index fd8769f4e..dc9512eee 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReferenceConfidenceModel.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/tools/walkers/haplotypecaller/ReferenceConfidenceModel.java @@ -47,13 +47,16 @@ package org.broadinstitute.gatk.tools.walkers.haplotypecaller; import htsjdk.samtools.*; +import htsjdk.variant.variantcontext.*; +import htsjdk.variant.vcf.VCFHeaderLine; +import htsjdk.variant.vcf.VCFSimpleHeaderLine; import org.broadinstitute.gatk.engine.contexts.AlignmentContext; import org.broadinstitute.gatk.utils.GenomeLoc; import org.broadinstitute.gatk.utils.GenomeLocParser; import org.broadinstitute.gatk.utils.MathUtils; import org.broadinstitute.gatk.utils.QualityUtils; import org.broadinstitute.gatk.utils.activeregion.ActiveRegion; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.locusiterator.LocusIteratorByState; import org.broadinstitute.gatk.utils.pileup.PileupElement; @@ -62,9 +65,6 @@ import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl; import org.broadinstitute.gatk.utils.sam.AlignmentUtils; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; import org.broadinstitute.gatk.utils.variant.GATKVariantContextUtils; -import 
htsjdk.variant.variantcontext.*; -import htsjdk.variant.vcf.VCFHeaderLine; -import htsjdk.variant.vcf.VCFSimpleHeaderLine; import java.io.File; import java.util.*; @@ -86,6 +86,7 @@ public class ReferenceConfidenceModel { private final GenomeLocParser genomeLocParser; private final Set samples; + private final SAMFileHeader header; // TODO -- really shouldn't depend on this private final int indelInformativeDepthIndelSize; private final static boolean WRITE_DEBUGGING_BAM = false; @@ -113,6 +114,7 @@ public class ReferenceConfidenceModel { this.genomeLocParser = genomeLocParser; this.samples = samples; + this.header = header; this.indelInformativeDepthIndelSize = indelInformativeDepthIndelSize; if ( WRITE_DEBUGGING_BAM ) { @@ -156,10 +158,10 @@ public class ReferenceConfidenceModel { * * @param refHaplotype the reference haplotype, used to get the reference bases across activeRegion.getLoc() * @param calledHaplotypes a list of haplotypes that segregate in this region, for realignment of the reads in the - * stratifiedReadMap, corresponding to each reads best haplotype. Must contain the refHaplotype. + * readLikelihoods, corresponding to each reads best haplotype. Must contain the refHaplotype. * @param paddedReferenceLoc the location of refHaplotype (which might be larger than activeRegion.getLoc()) * @param activeRegion the active region we want to get the reference confidence over - * @param stratifiedReadMap a map from a single sample to its PerReadAlleleLikelihoodMap for each haplotype in calledHaplotypes + * @param readLikelihoods a map from a single sample to its PerReadAlleleLikelihoodMap for each haplotype in calledHaplotypes * @param variantCalls calls made in this region. 
The return result will contain any variant call in this list in the * correct order by genomic position, and any variant in this list will stop us emitting a ref confidence * under any position it covers (for snps and insertions that is 1 bp, but for deletions its the entire ref span) @@ -170,22 +172,22 @@ public class ReferenceConfidenceModel { final Collection calledHaplotypes, final GenomeLoc paddedReferenceLoc, final ActiveRegion activeRegion, - final Map stratifiedReadMap, + final ReadLikelihoods readLikelihoods, final List variantCalls) { if ( refHaplotype == null ) throw new IllegalArgumentException("refHaplotype cannot be null"); if ( calledHaplotypes == null ) throw new IllegalArgumentException("calledHaplotypes cannot be null"); if ( !calledHaplotypes.contains(refHaplotype)) throw new IllegalArgumentException("calledHaplotypes must contain the refHaplotype"); if ( paddedReferenceLoc == null ) throw new IllegalArgumentException("paddedReferenceLoc cannot be null"); if ( activeRegion == null ) throw new IllegalArgumentException("activeRegion cannot be null"); - if ( stratifiedReadMap == null ) throw new IllegalArgumentException("stratifiedReadMap cannot be null"); - if ( stratifiedReadMap.size() != 1 ) throw new IllegalArgumentException("stratifiedReadMap must contain exactly one sample but it contained " + stratifiedReadMap.size()); + if ( readLikelihoods == null ) throw new IllegalArgumentException("readLikelihoods cannot be null"); + if ( readLikelihoods.sampleCount() != 1 ) throw new IllegalArgumentException("readLikelihoods must contain exactly one sample but it contained " + readLikelihoods.sampleCount()); if ( refHaplotype.length() != activeRegion.getExtendedLoc().size() ) throw new IllegalArgumentException("refHaplotype " + refHaplotype.length() + " and activeRegion location size " + activeRegion.getLocation().size() + " are different"); final GenomeLoc refSpan = activeRegion.getLocation(); - final List refPileups = 
getPileupsOverReference(refHaplotype, calledHaplotypes, paddedReferenceLoc, activeRegion, refSpan, stratifiedReadMap); + final List refPileups = getPileupsOverReference(refHaplotype, calledHaplotypes, paddedReferenceLoc, activeRegion, refSpan, readLikelihoods); final byte[] ref = refHaplotype.getBases(); final List results = new ArrayList<>(refSpan.size()); - final String sampleName = stratifiedReadMap.keySet().iterator().next(); + final String sampleName = readLikelihoods.sample(0); final int globalRefOffset = refSpan.getStart() - activeRegion.getExtendedLoc().getStart(); for ( final ReadBackedPileup pileup : refPileups ) { @@ -311,15 +313,15 @@ public class ReferenceConfidenceModel { final GenomeLoc paddedReferenceLoc, final ActiveRegion activeRegion, final GenomeLoc activeRegionSpan, - final Map stratifiedReadMap) { + final ReadLikelihoods readLikelihoods) { if ( refHaplotype == null ) throw new IllegalArgumentException("refHaplotype cannot be null"); if ( calledHaplotypes == null ) throw new IllegalArgumentException("calledHaplotypes cannot be null"); if ( !calledHaplotypes.contains(refHaplotype)) throw new IllegalArgumentException("calledHaplotypes must contain the refHaplotype"); if ( paddedReferenceLoc == null ) throw new IllegalArgumentException("paddedReferenceLoc cannot be null"); if ( activeRegion == null ) throw new IllegalArgumentException("activeRegion cannot be null"); - if ( stratifiedReadMap == null ) throw new IllegalArgumentException("stratifiedReadMap cannot be null"); - if ( stratifiedReadMap.size() != 1 ) throw new IllegalArgumentException("stratifiedReadMap must contain exactly one sample but it contained " + stratifiedReadMap.size()); + if ( readLikelihoods == null ) throw new IllegalArgumentException("readLikelihoods cannot be null"); + if ( readLikelihoods.sampleCount() != 1 ) throw new IllegalArgumentException("readLikelihoods must contain exactly one sample but it contained " + readLikelihoods.sampleCount()); final List reads = 
activeRegion.getReads(); diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeLDCalculator.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeLDCalculator.java index ac33221d5..2c17d4d5c 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeLDCalculator.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/HaplotypeLDCalculator.java @@ -47,11 +47,10 @@ package org.broadinstitute.gatk.utils.haplotype; import com.google.java.contract.Requires; +import htsjdk.variant.variantcontext.VariantContext; import org.broadinstitute.gatk.tools.walkers.haplotypecaller.PairHMMLikelihoodCalculationEngine; import org.broadinstitute.gatk.utils.MathUtils; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; -import htsjdk.variant.variantcontext.Allele; -import htsjdk.variant.variantcontext.VariantContext; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import java.util.*; @@ -66,7 +65,7 @@ import java.util.*; */ public class HaplotypeLDCalculator { private final List haplotypes; - private final Map haplotypeReadMap; + private final ReadLikelihoods readLikelihoods; private List> haplotypeLikelihoodsPerSample = null; // linear contigency table with table[0] == [0][0], table[1] = [0][1], table[2] = [1][0], table[3] = [1][1] @@ -75,14 +74,15 @@ public class HaplotypeLDCalculator { /** * For testing */ + @SuppressWarnings("unchecked") protected HaplotypeLDCalculator() { haplotypes = Collections.emptyList(); - haplotypeReadMap = Collections.emptyMap(); + readLikelihoods = new ReadLikelihoods<>((List)Collections.EMPTY_LIST, (List)Collections.EMPTY_LIST, Collections.EMPTY_MAP); } - public HaplotypeLDCalculator(List haplotypes, Map haplotypeReadMap) { + public HaplotypeLDCalculator(final List haplotypes, final ReadLikelihoods haplotypeReadMap) { 
this.haplotypes = haplotypes; - this.haplotypeReadMap = haplotypeReadMap; + this.readLikelihoods = haplotypeReadMap; } /** @@ -94,13 +94,13 @@ public class HaplotypeLDCalculator { private void buildHaplotypeLikelihoodsPerSampleIfNecessary() { if ( haplotypeLikelihoodsPerSample == null ) { // do the lazy computation - final Set samples = haplotypeReadMap.keySet(); - haplotypeLikelihoodsPerSample = new LinkedList>(); + final Set samples = new LinkedHashSet<>(readLikelihoods.samples()); + haplotypeLikelihoodsPerSample = new LinkedList<>(); for( final String sample : samples ) { - final Map map = new HashMap(haplotypes.size()); + final Map map = new HashMap<>(haplotypes.size()); for( final Haplotype h : haplotypes ) { // count up the co-occurrences of the events for the R^2 calculation - final double haplotypeLikelihood = PairHMMLikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, haplotypeReadMap, Collections.singletonList(Allele.create(h, true)), false)[0][0]; + final double haplotypeLikelihood = PairHMMLikelihoodCalculationEngine.computeDiploidHaplotypeLikelihoods(sample, readLikelihoods, Collections.singletonList(h), false)[0][0]; map.put(h, haplotypeLikelihood); } haplotypeLikelihoodsPerSample.add(map); @@ -162,7 +162,7 @@ public class HaplotypeLDCalculator { * * The probability is just p11_22 / (p11_22 + p hets) * - * @table linear contigency table with table[0] == [0][0], table[1] = [0][1], table[2] = [1][0], table[3] = [1][1] + * @param table linear contingency table with table[0] == [0][0], table[1] = [0][1], table[2] = [1][0], table[3] = [1][1] * doesn't have to be normalized as this function does the normalization internally * @return the real space probability that the data is phased */ diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/LDMerger.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/LDMerger.java index 710fbc32c..f5f6a5bb8 100644 ---
a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/LDMerger.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/LDMerger.java @@ -49,12 +49,15 @@ package org.broadinstitute.gatk.utils.haplotype; import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import org.broadinstitute.gatk.utils.GenomeLoc; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; import htsjdk.variant.variantcontext.Allele; import htsjdk.variant.variantcontext.VariantContext; import htsjdk.variant.variantcontext.VariantContextBuilder; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; -import java.util.*; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.TreeSet; /** * Merges VariantContexts in a series of haplotypes according to their pairwise LD @@ -94,19 +97,19 @@ public class LDMerger extends MergeVariantsAcrossHaplotypes { * Merge as many events among the haplotypes as possible based on pairwise LD among variants * * @param haplotypes a list of haplotypes whose events we want to merge - * @param haplotypeReadMap map from sample name -> read likelihoods for each haplotype + * @param readLikelihoods map from sample name -> read likelihoods for each haplotype * @param startPosKeySet a set of starting positions of all events among the haplotypes * @param ref the reference bases * @param refLoc the span of the reference bases */ @Override public boolean merge( final List haplotypes, - final Map haplotypeReadMap, + final ReadLikelihoods readLikelihoods, final TreeSet startPosKeySet, final byte[] ref, final GenomeLoc refLoc ) { if ( haplotypes == null ) throw new IllegalArgumentException("haplotypes cannot be null"); - if ( haplotypeReadMap == null ) throw new IllegalArgumentException("haplotypeReadMap cannot be null"); + if ( readLikelihoods == null ) throw new IllegalArgumentException("readLikelihoods cannot be 
null"); if ( startPosKeySet == null ) throw new IllegalArgumentException("startPosKeySet cannot be null"); if ( ref == null ) throw new IllegalArgumentException("ref cannot be null"); if ( refLoc == null ) throw new IllegalArgumentException("refLoc cannot be null"); @@ -114,8 +117,8 @@ public class LDMerger extends MergeVariantsAcrossHaplotypes { if( startPosKeySet.size() <= 1 ) { return false; } - final int nSamples = haplotypeReadMap.keySet().size(); - final HaplotypeLDCalculator r2Calculator = new HaplotypeLDCalculator(haplotypes, haplotypeReadMap); + final int nSamples = readLikelihoods.sampleCount(); + final HaplotypeLDCalculator r2Calculator = new HaplotypeLDCalculator(haplotypes, readLikelihoods); boolean somethingWasMerged = false; boolean mapWasUpdated = true; while( mapWasUpdated ) { @@ -207,7 +210,7 @@ public class LDMerger extends MergeVariantsAcrossHaplotypes { * @param haplotypes our haplotypes * @param thisStart the starting position of the first event to merge * @param nextStart the starting position of the next event to merge - * @return + * @return never {@code null}. 
*/ private LDMergeData getPairOfEventsToMerge(final List haplotypes, final int thisStart, final int nextStart) { final LDMergeData mergeData = new LDMergeData(); diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/MergeVariantsAcrossHaplotypes.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/MergeVariantsAcrossHaplotypes.java index f3565e4b8..3add2a41b 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/MergeVariantsAcrossHaplotypes.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotype/MergeVariantsAcrossHaplotypes.java @@ -47,10 +47,9 @@ package org.broadinstitute.gatk.utils.haplotype; import org.broadinstitute.gatk.utils.GenomeLoc; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import java.util.List; -import java.util.Map; import java.util.TreeSet; /** @@ -63,14 +62,14 @@ public class MergeVariantsAcrossHaplotypes { * Merge variants across the haplotypes, updating the haplotype event maps and startPos set as appropriate * * @param haplotypes a list of haplotypes whose events we want to merge - * @param haplotypeReadMap map from sample name -> read likelihoods for each haplotype + * @param readLikelihoods map from sample name -> read likelihoods for each haplotype * @param startPosKeySet a set of starting positions of all events among the haplotypes * @param ref the reference bases * @param refLoc the span of the reference bases * @return true if anything was merged */ public boolean merge( final List haplotypes, - final Map haplotypeReadMap, + final ReadLikelihoods readLikelihoods, final TreeSet startPosKeySet, final byte[] ref, final GenomeLoc refLoc ) { diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java 
b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java index ac9706011..ede47f03b 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/AllHaplotypeBAMWriter.java @@ -47,13 +47,13 @@ package org.broadinstitute.gatk.utils.haplotypeBAMWriter; import org.broadinstitute.gatk.utils.GenomeLoc; -import org.broadinstitute.gatk.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; -import htsjdk.variant.variantcontext.Allele; -import java.util.*; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; /** * A haplotype bam writer that writes out all haplotypes as reads and then @@ -66,6 +66,7 @@ import java.util.*; * Time: 1:50 PM */ class AllHaplotypeBAMWriter extends HaplotypeBAMWriter { + public AllHaplotypeBAMWriter(final ReadDestination destination) { super(destination); } @@ -78,13 +79,11 @@ class AllHaplotypeBAMWriter extends HaplotypeBAMWriter { final GenomeLoc paddedReferenceLoc, final Collection bestHaplotypes, final Set calledHaplotypes, - final Map stratifiedReadMap) { + final ReadLikelihoods readLikelihoods) { writeHaplotypesAsReads(haplotypes, new HashSet<>(bestHaplotypes), paddedReferenceLoc); - - for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for( final GATKSAMRecord read : readAlleleLikelihoodMap.getLikelihoodReadMap().keySet() ) { + final int sampleCount = readLikelihoods.sampleCount(); + for (int s = 0; s < sampleCount; s++) + for (final GATKSAMRecord read : readLikelihoods.sampleReads(s)) 
writeReadAgainstHaplotype(read); - } - } } } diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java index b1c0acb74..ff1afa0f7 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/CalledHaplotypeBAMWriter.java @@ -47,15 +47,11 @@ package org.broadinstitute.gatk.utils.haplotypeBAMWriter; import org.broadinstitute.gatk.utils.GenomeLoc; -import org.broadinstitute.gatk.utils.genotyper.MostLikelyAllele; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; -import htsjdk.variant.variantcontext.Allele; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; import java.util.Set; /** @@ -82,16 +78,15 @@ class CalledHaplotypeBAMWriter extends HaplotypeBAMWriter { final GenomeLoc paddedReferenceLoc, final Collection bestHaplotypes, final Set calledHaplotypes, - final Map stratifiedReadMap) { + final ReadLikelihoods readLikelihoods) { if ( calledHaplotypes.isEmpty() ) // only write out called haplotypes return; writeHaplotypesAsReads(calledHaplotypes, calledHaplotypes, paddedReferenceLoc); - for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) { - for ( final GATKSAMRecord read : readAlleleLikelihoodMap.getLikelihoodReadMap().keySet() ) { + final int sampleCount = readLikelihoods.sampleCount(); + for (int s = 0; s < sampleCount; s++) + for (final GATKSAMRecord read : readLikelihoods.sampleReads(s)) writeReadAgainstHaplotype(read); 
- } - } } } diff --git a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java index 082cd73b5..8df32f902 100644 --- a/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java +++ b/protected/gatk-tools-protected/src/main/java/org/broadinstitute/gatk/utils/haplotypeBAMWriter/HaplotypeBAMWriter.java @@ -46,22 +46,18 @@ package org.broadinstitute.gatk.utils.haplotypeBAMWriter; -import htsjdk.samtools.Cigar; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMTag; import org.broadinstitute.gatk.engine.io.GATKSAMFileWriter; import org.broadinstitute.gatk.utils.GenomeLoc; import org.broadinstitute.gatk.utils.Utils; -import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.sam.AlignmentUtils; -import org.broadinstitute.gatk.utils.sam.CigarUtils; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; -import org.broadinstitute.gatk.utils.smithwaterman.SWPairwiseAlignment; import java.util.Collection; import java.util.HashSet; -import java.util.Map; import java.util.Set; /** @@ -155,17 +151,17 @@ public abstract class HaplotypeBAMWriter { * @param paddedReferenceLoc the span of the based reference here * @param bestHaplotypes a list of the best (a subset of all) haplotypes that actually went forward into genotyping * @param calledHaplotypes a list of the haplotypes at where actually called as non-reference - * @param stratifiedReadMap a map from sample -> likelihoods for each read for each of the best haplotypes + * @param readLikelihoods a map from sample -> likelihoods for each read for each of the best haplotypes */ public abstract void 
writeReadsAlignedToHaplotypes(final Collection haplotypes, final GenomeLoc paddedReferenceLoc, final Collection bestHaplotypes, final Set calledHaplotypes, - final Map stratifiedReadMap); + final ReadLikelihoods readLikelihoods); public void writeReadsAlignedToHaplotypes(final Collection haplotypes, final GenomeLoc paddedReferenceLoc, - final Map stratifiedReadMap) { + final ReadLikelihoods stratifiedReadMap) { writeReadsAlignedToHaplotypes(haplotypes, paddedReferenceLoc, haplotypes, new HashSet<>(haplotypes), stratifiedReadMap); } @@ -210,7 +206,7 @@ public abstract class HaplotypeBAMWriter { record.setCigar(AlignmentUtils.consolidateCigar(haplotype.getCigar())); record.setMappingQuality(isAmongBestHaplotypes ? 60 : 0); record.setReadName("HC" + uniqueNameCounter++); - record.setAttribute(AlignmentUtils.HAPLOTYPE_TAG, haplotype.hashCode()); + record.setAttribute(AlignmentUtils.HAPLOTYPE_TAG,haplotype.hashCode()); record.setReadUnmappedFlag(false); record.setReferenceIndex(paddedRefLoc.getContigIndex()); record.setAttribute(SAMTag.RG.toString(), READ_GROUP_ID); diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java index 534facd44..0bcf4ee62 100644 --- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java +++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/engine/downsampling/AlleleBiasedDownsamplingUtils.java @@ -25,13 +25,16 @@ package org.broadinstitute.gatk.engine.downsampling; -import org.broadinstitute.gatk.utils.*; +import org.apache.log4j.Logger; +import org.broadinstitute.gatk.utils.BaseUtils; +import org.broadinstitute.gatk.utils.MathUtils; import org.broadinstitute.gatk.utils.collections.DefaultHashMap; import org.broadinstitute.gatk.utils.exceptions.GATKException; import 
org.broadinstitute.gatk.utils.exceptions.UserException; -import org.broadinstitute.gatk.utils.pileup.*; +import org.broadinstitute.gatk.utils.pileup.PileupElement; +import org.broadinstitute.gatk.utils.pileup.ReadBackedPileup; +import org.broadinstitute.gatk.utils.pileup.ReadBackedPileupImpl; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; -import org.broadinstitute.gatk.utils.BaseUtils; import org.broadinstitute.gatk.utils.text.XReadLines; import htsjdk.variant.variantcontext.Allele; @@ -39,8 +42,6 @@ import java.io.File; import java.io.IOException; import java.util.*; -import org.apache.log4j.Logger; - public class AlleleBiasedDownsamplingUtils { // define this class so that we can use Java generics below @@ -216,7 +217,7 @@ public class AlleleBiasedDownsamplingUtils { * @param downsamplingFraction the fraction of total reads to remove per allele * @return list of reads TO REMOVE from allele biased down-sampling */ - public static List selectAlleleBiasedReads(final Map> alleleReadMap, final double downsamplingFraction) { + public static List selectAlleleBiasedReads(final Map> alleleReadMap, final double downsamplingFraction) { int totalReads = 0; for ( final List reads : alleleReadMap.values() ) totalReads += reads.size(); diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java index d875f15f5..60c882417 100644 --- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java +++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/tools/walkers/annotator/VariantAnnotatorEngine.java @@ -27,17 +27,18 @@ package org.broadinstitute.gatk.tools.walkers.annotator; import com.google.java.contract.Ensures; import com.google.java.contract.Requires; -import org.broadinstitute.gatk.utils.commandline.RodBinding; 
+import htsjdk.variant.variantcontext.*; +import htsjdk.variant.vcf.*; import org.broadinstitute.gatk.engine.GenomeAnalysisEngine; import org.broadinstitute.gatk.engine.contexts.AlignmentContext; import org.broadinstitute.gatk.engine.contexts.ReferenceContext; import org.broadinstitute.gatk.engine.refdata.RefMetaDataTracker; import org.broadinstitute.gatk.tools.walkers.annotator.interfaces.*; import org.broadinstitute.gatk.utils.GenomeLoc; +import org.broadinstitute.gatk.utils.commandline.RodBinding; import org.broadinstitute.gatk.utils.exceptions.UserException; import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; -import htsjdk.variant.variantcontext.*; -import htsjdk.variant.vcf.*; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import java.util.*; @@ -204,6 +205,15 @@ public class VariantAnnotatorEngine { return annotateDBs(tracker, annotated); } + public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker, + final ReadLikelihoods readLikelihoods, + final VariantContext vc) { + //TODO we transform the read-likelihood into the Map^2 previous version for the sake of not changing the annotation interface. + //TODO should we change those interfaces? 
+ final Map annotationLikelihoods = readLikelihoods.toPerReadAlleleLikelihoodMap(); + return annotateContextForActiveRegion(tracker, annotationLikelihoods, vc); + } + public VariantContext annotateContextForActiveRegion(final RefMetaDataTracker tracker, final Map perReadAlleleLikelihoodMap, final VariantContext vc) { diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java index da29e0c42..eb0d408e3 100644 --- a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java +++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/pairhmm/PairHMM.java @@ -29,6 +29,7 @@ import com.google.java.contract.Requires; import org.apache.log4j.Logger; import org.broadinstitute.gatk.utils.MathUtils; import org.broadinstitute.gatk.utils.genotyper.PerReadAlleleLikelihoodMap; +import org.broadinstitute.gatk.utils.genotyper.ReadLikelihoods; import org.broadinstitute.gatk.utils.haplotype.Haplotype; import org.broadinstitute.gatk.utils.sam.GATKSAMRecord; import htsjdk.variant.variantcontext.Allele; @@ -125,7 +126,27 @@ public abstract class PairHMM { * @param readMaxLength the max length of reads we want to use with this PairHMM */ public void initialize( final List haplotypes, final Map> perSampleReadList, final int readMaxLength, final int haplotypeMaxLength ) { - initialize(readMaxLength, haplotypeMaxLength); + initialize(readMaxLength, haplotypeMaxLength); + } + + private int findMaxReadLength(final GATKSAMRecord ... 
reads) { + int max = 0; + for (final GATKSAMRecord read : reads) { + final int readLength = read.getReadLength(); + if (max < readLength) + max = readLength; + } + return max; + } + + private int findMaxAlleleLength(final List alleles) { + int max = 0; + for (final Allele allele : alleles) { + final int alleleLength = allele.length(); + if (max < alleleLength) + max = alleleLength; + } + return max; } protected int findMaxReadLength(final List reads) { @@ -147,6 +168,63 @@ return listMaxHaplotypeLength; } + /** + * Given a list of reads and haplotypes, for every read compute the total probability of said read arising from + * each haplotype given base substitution, insertion, and deletion probabilities. + * + * @param processedReads reads to analyze + * @param likelihoods where to store the likelihoods where position [a][r] is reserved for the likelihood of {@code reads[r]} + * conditional to {@code alleles[a]}. + * @param constantGCP constant penalty for gap continuations. + * + * @return nothing; the computed likelihoods are written into {@code likelihoods} in place. 
+ */ + public void computeLikelihoods(final ReadLikelihoods.Matrix likelihoods, final List processedReads, final byte constantGCP) { + if(doProfiling) + startTime = System.nanoTime(); + // (re)initialize the pairHMM only if necessary + final int readMaxLength = findMaxReadLength(processedReads); + final int haplotypeMaxLength = findMaxAlleleLength(likelihoods.alleles()); + if (!initialized || readMaxLength > maxReadLength || haplotypeMaxLength > maxHaplotypeLength) + initialize(readMaxLength, haplotypeMaxLength); + + final int readCount = processedReads.size(); + final List alleles = likelihoods.alleles(); + final int alleleCount = alleles.size(); + mLikelihoodArray = new double[readCount * alleleCount]; + int idx = 0; + int readIndex = 0; + for(final GATKSAMRecord read : processedReads){ + final int readLength = read.getReadLength(); + final byte[] readBases = read.getReadBases(); + final byte[] readQuals = read.getBaseQualities(); + final byte[] readInsQuals = read.getBaseInsertionQualities(); + final byte[] readDelQuals = read.getBaseDeletionQualities(); + final byte[] overallGCP = new byte[readLength]; + Arrays.fill(overallGCP,constantGCP); + + // peek at the next haplotype in the list (necessary to get nextHaplotypeBases, which is required for caching in the array implementation) + final boolean isFirstHaplotype = true; + for (int a = 0; a < alleleCount; a++) { + final Allele allele = alleles.get(a); + final byte[] alleleBases = allele.getBases(); + final byte[] nextAlleleBases = a == alleles.size() - 1 ? 
null : alleles.get(a + 1).getBases(); + final double lk = computeReadLikelihoodGivenHaplotypeLog10(alleleBases, + readBases, readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype, nextAlleleBases); + likelihoods.set(a, readIndex, lk); + mLikelihoodArray[idx++] = lk; + } + readIndex++; + } + if(doProfiling) { + threadLocalPairHMMComputeTimeDiff = (System.nanoTime() - startTime); + //synchronized(doProfiling) + { + pairHMMComputeTime += threadLocalPairHMMComputeTimeDiff; + } + } + } + /** * Given a list of reads and haplotypes, for every read compute the total probability of said read arising from * each haplotype given base substitution, insertion, and deletion probabilities. @@ -156,6 +234,7 @@ public abstract class PairHMM { * @param GCPArrayMap Each read is associated with an array containing the gap continuation penalties for use in the model. Length of each GCP-array must match that of its read. * @return a PerReadAlleleLikelihoodMap containing each read, haplotype-allele, and the log10 probability of * said read coming from the said haplotype under the provided error model + * @deprecated */ public PerReadAlleleLikelihoodMap computeLikelihoods(final List reads, final Map alleleHaplotypeMap, final Map GCPArrayMap) { if(doProfiling) @@ -178,7 +257,7 @@ public abstract class PairHMM { // peak at the next haplotype in the list (necessary to get nextHaplotypeBases, which is required for caching in the array implementation) byte[] currentHaplotypeBases = null; - boolean isFirstHaplotype = true; + final boolean isFirstHaplotype = true; Allele currentAllele = null; double log10l; //for (final Allele allele : alleleHaplotypeMap.keySet()){ diff --git a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java index 289275abd..4c9a4447b 100644 --- 
a/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java +++ b/public/gatk-tools-public/src/main/java/org/broadinstitute/gatk/utils/sam/AlignmentUtils.java @@ -31,7 +31,6 @@ import htsjdk.samtools.Cigar; import htsjdk.samtools.CigarElement; import htsjdk.samtools.CigarOperator; import htsjdk.samtools.SAMRecord; -import org.broadinstitute.gatk.engine.GenomeAnalysisEngine; import org.broadinstitute.gatk.utils.BaseUtils; import org.broadinstitute.gatk.utils.exceptions.ReviewedGATKException; import org.broadinstitute.gatk.utils.haplotype.Haplotype; @@ -75,7 +74,11 @@ public final class AlignmentUtils { * @param haplotype the haplotype that the read should be aligned to, before aligning to the reference * @param referenceStart the start of the reference that haplotype is aligned to. Provides global coordinate frame. * @param isInformative true if the read is differentially informative for one of the haplotypes - * @return a GATKSAMRecord aligned to reference, or null if no meaningful alignment is possible + * + * @throws IllegalArgumentException if {@code originalRead} is {@code null} or {@code haplotype} is {@code null} or it + * does not have a Cigar or the {@code referenceStart} is invalid (less than 1). + * + * @return a GATKSAMRecord aligned to reference. Never {@code null}. */ public static GATKSAMRecord createReadAlignedToRef(final GATKSAMRecord originalRead, final Haplotype haplotype,