Removed unused routines in DupUtils; moved DuplicateQualsWalker to the archive; docs for the new duplicate traversal code; general code cleanup; bug fixes for combineDuplicates; integration tests for the CombineDuplicates walker
git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@2468 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
parent
29f94119d1
commit
87e863b48d
@@ -61,6 +61,7 @@ public class TraverseDuplicates extends TraversalEngine {
    /** descriptor of the type */
    private static final String DUPS_STRING = "dups";

    /** Turn this to true to enable logger.debug output */
    private final boolean DEBUG = false;

    private List<SAMRecord> readsAtLoc(final SAMRecord read, PushbackIterator<SAMRecord> iter) {
@@ -73,11 +74,9 @@ public class TraverseDuplicates extends TraversalEngine {

            // the next read starts too late
            if (site2.getStart() != site.getStart()) {
                //System.out.printf("site = %s, site2 = %s%n", site, site2);
                iter.pushback(read2);
                break;
            } else {
                //System.out.printf("Read is a duplicate: %s%n", read.format());
                l.add(read2);
            }
        }
@@ -85,11 +84,20 @@ public class TraverseDuplicates extends TraversalEngine {
        return l;
    }

    /**
     * Creates a set of lists of reads, where each list contains reads from the same underlying molecule
     * according to their duplicate flag and their start/end positions (and their mate's, if applicable).
     *
     * @param reads the list of reads to split into unique molecular samples
     * @return a set of read lists, one list per unique underlying molecule
     */
    protected Set<List<SAMRecord>> uniqueReadSets(List<SAMRecord> reads) {
        Set<List<SAMRecord>> readSets = new HashSet<List<SAMRecord>>();

        // for each read, find duplicates, and either add the read to its duplicate list or start a new one
        for ( SAMRecord read : reads ) {

            List<SAMRecord> readSet = findDuplicateReads(read, readSets);

            if ( readSet == null ) {
                readSets.add(new ArrayList<SAMRecord>(Arrays.asList(read))); // copy so I can add to the list
            } else {
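Aside (editorial sketch, not commit content): the grouping contract above can be restated compactly under the assumption that start and mate-start positions identify a molecule. The class and key format below are illustrative, not GATK API, and the real findDuplicateReads() also consults the duplicate flag:

    import net.sf.samtools.SAMRecord;
    import java.util.*;

    class ReadGroupingSketch {
        // Group reads by (start, mate start) -- the same positional signal
        // uniqueReadSets()/findDuplicateReads() use above.
        static Collection<List<SAMRecord>> uniqueReadSets(List<SAMRecord> reads) {
            Map<String, List<SAMRecord>> byMolecule = new LinkedHashMap<String, List<SAMRecord>>();
            for (SAMRecord read : reads) {
                String key = read.getReadPairedFlag()
                        ? read.getAlignmentStart() + ":" + read.getMateAlignmentStart()
                        : String.valueOf(read.getAlignmentStart());
                List<SAMRecord> set = byMolecule.get(key);
                if (set == null) {
                    set = new ArrayList<SAMRecord>();
                    byMolecule.put(key, set);
                }
                set.add(read);
            }
            return byMolecule.values();
        }
    }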
@@ -100,6 +108,15 @@ public class TraverseDuplicates extends TraversalEngine {
        return readSets;
    }

    /**
     * Find duplicate reads for read in the set of unique reads. This is effectively a duplicate-marking
     * algorithm, but for safety's sake it relies on the file itself having been marked by a true
     * duplicate-marking algorithm. Aware of both paired and single-end reads.
     *
     * @param read     the read to place among the existing read sets
     * @param readSets the current set of duplicate read lists
     * @return the list of duplicate reads that read is a member of, or null if it's the only one of its kind
     */
    protected List<SAMRecord> findDuplicateReads(SAMRecord read, Set<List<SAMRecord>> readSets ) {
        if ( read.getReadPairedFlag() ) {
            // paired
@@ -107,8 +124,6 @@ public class TraverseDuplicates extends TraversalEngine {

        for (List<SAMRecord> reads : readSets) {
            SAMRecord key = reads.get(0);
            //if (DEBUG)
            //    logger.debug(String.format("Examining reads at %s vs. %s at %s / %s vs. %s / %s%n", key.getReadName(), read.getReadName(), keyLoc, keyMateLoc, readLoc, readMateLoc));

            // read and key start at the same place, and either this read and the key
            // share a mate location or the read is flagged as a duplicate
@@ -191,19 +206,11 @@ public class TraverseDuplicates extends TraversalEngine {
                          Shard shard,
                          ShardDataProvider dataProvider,
                          T sum) {
        //logger.debug(String.format("TraverseDuplicates.traverse Genomic interval is %s", shard.getGenomeLoc()));

        // safety first :-)
        if (!(walker instanceof DuplicateWalker))
            throw new IllegalArgumentException("Walker isn't a duplicate walker!");

        DuplicateWalker<M, T> dupWalker = (DuplicateWalker<M, T>) walker;

        // while we still have more reads
        // ok, here's the idea. We get all the reads that start at the same position in the genome
        // We then split the list of reads into sublists of reads:
        // -> those with the same mate pair position, for paired reads
        // -> those flagged as unpaired and duplicated but having the same start and end and

        FilteringIterator filterIter = new FilteringIterator(new ReadView(dataProvider).iterator(), new duplicateStreamFilterFunc());
        PushbackIterator<SAMRecord> iter = new PushbackIterator<SAMRecord>(filterIter);

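Aside (editorial sketch, not commit content): stitched together, the traversal loop described piecewise in the hunks above and below has the following shape. The generics and fields mirror this class's own; the reduce(M, T) call is assumed to follow the usual GATK walker map/reduce contract, and shard bookkeeping and error handling are omitted:

    // Editorial sketch of the loop shape; <M, T> and the helpers
    // (readsAtLoc, uniqueReadSets) are this class's own.
    <M, T> T traverseSketch(PushbackIterator<SAMRecord> iter, DuplicateWalker<M, T> dupWalker, T sum) {
        while (iter.hasNext()) {
            SAMRecord read = iter.next();
            GenomeLoc site = GenomeLocParser.createGenomeLoc(read);

            // all reads starting at this site, split into per-molecule sets
            Set<List<SAMRecord>> readSets = uniqueReadSets(readsAtLoc(read, iter));
            AlignmentContext locus = new AlignmentContext(site, new ReadBackedPileup(site));

            if (dupWalker.filter(site, locus, readSets)) {   // filter, then map/reduce
                M x = dupWalker.map(site, locus, readSets);
                sum = dupWalker.reduce(x, sum);              // assumed reduce(M, T) contract
            }
        }
        return sum;
    }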
@@ -219,7 +226,7 @@ public class TraverseDuplicates extends TraversalEngine {
            GenomeLoc site = GenomeLocParser.createGenomeLoc(read);

            Set<List<SAMRecord>> readSets = uniqueReadSets(readsAtLoc(read, iter));
            logger.debug(String.format("*** TraverseDuplicates.traverse at %s with %d read sets", site, readSets.size()));
            if ( DEBUG ) logger.debug(String.format("*** TraverseDuplicates.traverse at %s with %d read sets", site, readSets.size()));

            // Jump forward in the reference to this locus location
            AlignmentContext locus = new AlignmentContext(site, new ReadBackedPileup(site));
@@ -227,6 +234,7 @@ public class TraverseDuplicates extends TraversalEngine {
            // update the number of duplicate sets we've seen
            TraversalStatistics.nRecords++;

            // actually call filter and map, accumulating sum
            final boolean keepMeP = dupWalker.filter(site, locus, readSets);
            if (keepMeP) {
                M x = dupWalker.map(site, locus, readSets);
@@ -22,21 +22,6 @@ public abstract class DuplicateWalker<MapType, ReduceType> extends Walker<MapType, ReduceType>
        return true; // We are keeping all the reads
    }

    /**
     * Called by the traversal engine to decide whether to send non-duplicates as lists of
     * singleton reads to the map function. By default it's false.
     *
     * @return true if you want to see non-duplicates during the traversal
     */
    public boolean mapUniqueReadsTooP() { return false; }

    /**
     * Called by the traversal engine to decide whether to call map() at loci without duplicate reads
     *
     * @return true if you want map() to be called even at loci with no duplicate reads
     */
    public boolean mapAtLociWithoutDuplicates() { return true; }

    public abstract MapType map(GenomeLoc loc, AlignmentContext context, Set<List<SAMRecord>> readSets );

    // Given result of map function
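Aside (illustrative only; CountDuplicatesWalker and CombineDuplicatesWalker below are the real clients of this interface): a minimal subclass, assuming the standard walker reduceInit()/reduce() pair, might count duplicate sets like so:

    public class DupSetCountSketch extends DuplicateWalker<Integer, Integer> {
        public Integer map(GenomeLoc loc, AlignmentContext context, Set<List<SAMRecord>> readSets) {
            int nDupSets = 0;
            for (List<SAMRecord> set : readSets)
                if (set.size() > 1) nDupSets++;   // singleton lists are unique molecules
            return nDupSets;
        }

        public Integer reduceInit() { return 0; }

        public Integer reduce(Integer value, Integer sum) { return sum + value; }
    }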
@@ -1,244 +0,0 @@
/*
 * Copyright (c) 2009 The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

package org.broadinstitute.sting.oneoffprojects.walkers;

import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.walkers.DuplicateWalker;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.Pair;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.duplicates.DupUtils;
import org.broadinstitute.sting.utils.duplicates.DuplicateComp;

import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

class MismatchCounter {
    long nObs = 0;
    long nMismatches = 0;

    public void inc(long incNObs, long incNMismatches) {
        nObs += incNObs;
        nMismatches += incNMismatches;
    }

    public void inc(boolean mismatchP) {
        inc(1, mismatchP ? 1 : 0);
    }

    public double mismatchRate() {
        return (double)nMismatches / nObs;
    }

    public byte empiricalQualScore() {
        return QualityUtils.probToQual(1 - mismatchRate(), 0);
    }

    public String headerString() {
        return "mismatchRate\tempiricalQ\tnObs\tnMismatches";
    }

    public String toString() {
        return String.format("%.10f\t%d\t%d\t%6d", mismatchRate(), empiricalQualScore(), nObs, nMismatches);
    }
}

class QualityTracker {
    final private int MAX_QUAL_SCORE = 100;
    MismatchCounter[][] mismatchesByQ = new MismatchCounter[MAX_QUAL_SCORE][MAX_QUAL_SCORE];

    public QualityTracker() {
        for ( int i = 0; i < MAX_QUAL_SCORE; i++ ) {
            for ( int j = 0; j < MAX_QUAL_SCORE; j++ ) {
                mismatchesByQ[i][j] = new MismatchCounter();
            }
        }
    }

    public void inc(int b1Qi, int b2Qi, boolean mismatchP, boolean orderDependent) {
        int b1Q = orderDependent ? b1Qi : Math.max(b1Qi, b2Qi);
        int b2Q = orderDependent ? b2Qi : Math.min(b1Qi, b2Qi);

        if ( b1Q > MAX_QUAL_SCORE ) throw new RuntimeException("Unexpectedly large base quality " + b1Q);
        if ( b2Q > MAX_QUAL_SCORE ) throw new RuntimeException("Unexpectedly large base quality " + b2Q);

        mismatchesByQ[b1Q][b2Q].inc(mismatchP);
    }

    public void inc(DuplicateComp dc, boolean orderDependent) {
        inc(dc.getQLarger(), dc.getQSmaller(), dc.isMismatchP(), orderDependent);
    }

    public int probMismatchQ1Q2(int q1, int q2) {
        double e1 = 1 - QualityUtils.qualToProb(q1);
        double e2 = 1 - QualityUtils.qualToProb(q2);
        // note: originally written as 1/3 * e1 * e2, which is integer division and always zero
        double eMM = e1 * (1 - e2) + (1 - e1) * e2 - (1.0/3.0) * e1 * e2;
        return QualityUtils.probToQual(1 - eMM, 0.0);
    }

    public void printToStream(PrintStream out, boolean filterUnobserved) {
        out.printf("Q1\tQ2\tQmin\t%s%n", mismatchesByQ[0][0].headerString());
        for ( int i = 0; i < MAX_QUAL_SCORE; i++ ) {
            for ( int j = 0; j < MAX_QUAL_SCORE; j++ ) {
                MismatchCounter mc = mismatchesByQ[i][j];
                //System.out.printf("MC = %s%n", mc);
                if ( filterUnobserved && mc.nObs == 0 )
                    continue;
                out.printf("%d\t%d\t%d\t%s\t%n", i, j, probMismatchQ1Q2(i,j), mc.toString());
            }
        }
    }
}

public class DuplicateQualsWalker extends DuplicateWalker<List<DuplicateComp>, QualityTracker> {
    @Argument(fullName="filterUnobservedQuals", required=false, doc="Show only quality bins with at least one observation in the data")
    public boolean FILTER_UNOBSERVED_QUALS = false;

    @Argument(fullName="maxPairwiseCompsPerDupSet", required=false, doc="Maximum number of pairwise comparisons to perform among duplicate read sets")
    public int MAX_PAIRSIZE_COMPS_PER_DUPLICATE_SET = 100;

    @Argument(fullName="combinedQuals", required=false, doc="Combine and assess pairwise base qualities")
    public boolean COMBINE_QUALS = false;

    @Argument(fullName="combineAllDups", required=false, doc="Combine and assess pairwise base qualities")
    public boolean COMBINE_ALL_DUPS = false;

    @Argument(fullName="orderDependent", required=false, doc="")
    public boolean orderDependent = false;

    @Argument(fullName="compareToUniqueReads", required=false, doc="If true, then we will compare only to unique (i.e., non-duplicated molecules) at the same duplicate site")
    public boolean compareToUniqueReads = false;

    @Argument(fullName="comparePairToSingleton", required=false, doc="If true, then we will compare a combined dup to a random other read in the duplicate set, not a combined pair itself")
    public boolean comparePairToSingleton = false;

    final boolean DEBUG = false;
    final private boolean ACTUALLY_DO_WORK = true;

    public void onTraversalDone(QualityTracker result) {
        result.printToStream(out, FILTER_UNOBSERVED_QUALS);
    }

    public QualityTracker reduceInit() {
        return new QualityTracker();
    }

    public QualityTracker reduce(List<DuplicateComp> dupComps, QualityTracker tracker) {
        for ( DuplicateComp dc : dupComps ) {
            tracker.inc(dc, orderDependent);
        }

        return tracker;
    }

    // Print out data for regression
    public List<DuplicateComp> map(GenomeLoc loc, AlignmentContext context, Set<List<SAMRecord>> readSets ) {
        //logger.info(String.format("%s has %d duplicates and %d non-duplicates", loc, duplicateReads.size(), uniqueReads.size()));
        List<DuplicateComp> pairwiseComps = new ArrayList<DuplicateComp>();

        // todo -- fixme -- the logic here is all wrong given new interface
//        if ( ! ACTUALLY_DO_WORK )
//            return pairwiseComps;
//
//        if ( COMBINE_QUALS ) {
//            Pair<SAMRecord, SAMRecord> combinedReads = DupUtils.combinedReadPair( duplicateReads );
//            if ( combinedReads != null ) {
//                SAMRecord combined1 = combinedReads.first;
//                SAMRecord combined2 = combinedReads.second;
//
//                if ( comparePairToSingleton )
//                    pairwiseComps = addPairwiseMatches( pairwiseComps, combined1, duplicateReads.get(2), uniqueReads );
//                else
//                    pairwiseComps = addPairwiseMatches( pairwiseComps, combined1, combined2, uniqueReads );
//            }
//        } else {
//            int nComparisons = 0;
//            for ( SAMRecord read1 : duplicateReads ) {
//                for ( SAMRecord read2 : duplicateReads ) {
//                    if ( read1.hashCode() < read2.hashCode() && DupUtils.usableDuplicate(read1, read2) ) {
//                        // the hashcode insures we don't do A vs. B and B vs. A
//                        //System.out.printf("Comparing %s against %s%n", read1, read2);
//                        nComparisons++;
//                        pairwiseComps = addPairwiseMatches( pairwiseComps, read1, read2, uniqueReads );
//                        if ( nComparisons > MAX_PAIRSIZE_COMPS_PER_DUPLICATE_SET )
//                            break;
//                    }
//                }
//            }
//        }

        return pairwiseComps;
    }

    private List<DuplicateComp> addPairwiseMatches(List<DuplicateComp> comps,
                                                   SAMRecord read1, SAMRecord read2,
                                                   List<SAMRecord> uniqueReads ) {
        if ( compareToUniqueReads ) {
            // we want to compare to a read in the unique read set
            if ( uniqueReads.size() > 0 ) { // there's actually something to compare to
                SAMRecord uniqueRead = uniqueReads.get(0); // might as well get the first one
                return pairwiseMatches(comps, read1, uniqueRead);
            } else {
                return comps;
            }
        } else {
            // default, just do read1 vs. read2
            return pairwiseMatches(comps, read1, read2);
        }
    }

    /**
     * Calculates the pairwise mismatches between reads read1 and read2 and adds the result to the comps list.
     * Doesn't contain any logic deciding what to compare, just does read1 and read2.
     *
     * @param comps
     * @param read1
     * @param read2
     * @return
     */
    private List<DuplicateComp> pairwiseMatches(List<DuplicateComp> comps, SAMRecord read1, SAMRecord read2 ) {
        byte[] read1Bases = read1.getReadBases();
        byte[] read1Quals = read1.getBaseQualities();
        byte[] read2Bases = read2.getReadBases();
        byte[] read2Quals = read2.getBaseQualities();

        for ( int i = 0; i < read1Bases.length; i++) {
            byte qual1 = read1Quals[i];
            byte qual2 = read2Quals[i];
            boolean mismatchP = ! BaseUtils.basesAreEqual(read1Bases[i], read2Bases[i]);
            DuplicateComp dc = new DuplicateComp(qual1, qual2, mismatchP);
            comps.add(dc);
        }

        return comps;
    }
}
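A note on the archived probMismatchQ1Q2 above: the cross term was originally written 1/3 * e1 * e2, which in Java is integer division (1/3 == 0), so the term silently vanished; the listing here spells it (1.0/3.0) * e1 * e2. A standalone check, plain math only (no GATK types; qualToErrorProb is a local helper, not the QualityUtils API):

    public class MismatchProbCheck {
        static double qualToErrorProb(int q) { return Math.pow(10.0, -q / 10.0); }

        public static void main(String[] args) {
            double e1 = qualToErrorProb(20);   // Q20 -> error rate 0.01
            double e2 = qualToErrorProb(30);   // Q30 -> error rate 0.001
            double eMM = e1 * (1 - e2) + (1 - e1) * e2 - (1.0 / 3.0) * e1 * e2;
            System.out.printf("eMM   = %.6f%n", eMM);                           // ~0.010977
            System.out.printf("buggy = %.6f%n", e1 * (1 - e2) + (1 - e1) * e2); // ~0.010980
        }
    }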
@@ -21,21 +21,10 @@ public class CombineDuplicatesWalker extends DuplicateWalker<List<SAMRecord>, SAMFileWriter>
    @Argument(fullName="outputBAM", shortName="outputBAM", required=false, doc="BAM File to write combined duplicates to")
    public SAMFileWriter outputBAM = null;

    @Argument(fullName="includeUniqueReads", shortName="includeUniqueReads", required=false, doc="If true, also writes out non-duplicate reads in file")
    public boolean INCLUDE_UNIQUE_READS = true;

    @Argument(fullName="maxQ", shortName="maxQ", required=false,
              doc="The maximum Q score allowed for combined reads, reflects the background error rate giving rise to perfect bases that don't correspond to the reference")
    public int MAX_QUALITY_SCORE = 50;

    /**
     * do we want to include unique reads?
     * @return the user-specified command line argument INCLUDE_UNIQUE_READS
     */
    public boolean mapUniqueReadsTooP() {
        return INCLUDE_UNIQUE_READS;
    }

    /**
     * start the walker with the command line argument specified SAMFileWriter
     * @return a sam file writer, which may be null
@@ -59,11 +48,15 @@ public class CombineDuplicatesWalker extends DuplicateWalker<List<SAMRecord>, SAMFileWriter>
        return output;
    }


    /**
     * We don't want to see loci without duplicates, since
     * @return
     * when we're done, print out the collected stats
     * @param result the result of the traversal engine, to be printed out
     */
    public boolean mapAtLociWithoutDuplicates() { return false; }
    public void onTraversalDone(SAMFileWriter result) {
        return; // don't do anything
    }


    /**
     * Build a combined read given the input list of non-unique reads. If there's just one read in the
@@ -86,10 +79,12 @@ public class CombineDuplicatesWalker extends DuplicateWalker<List<SAMRecord>, SAMFileWriter>
            combinedRead = reads.get(0);
        } else {
            // actually call the combine function
            //for (SAMRecord read : duplicateReads ) {
            //    System.out.printf("Read %s%n", read.format());
            //}
//            for (SAMRecord read : reads ) {
//                out.printf("Combining Read %s%n", read.format());
//            }
//
            combinedRead = DupUtils.combineDuplicates(reads, MAX_QUALITY_SCORE);
            //out.printf(" => into %s%n", combinedRead.format());
        }

        if ( combinedRead.getDuplicateReadFlag() )
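Condensed, the combine step above is: a single read passes through unchanged, two or more get merged by DupUtils.combineDuplicates with the consensus quality capped at maxQ. A sketch of the core decision once the commented-out debugging is stripped (illustrative; the walker's real map() also handles unique reads and BAM output):

    SAMRecord buildCombinedRead(List<SAMRecord> reads) {
        if (reads.size() == 1)
            return reads.get(0);   // nothing to combine
        return DupUtils.combineDuplicates(reads, MAX_QUALITY_SCORE);
    }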
|
|||
|
|
@@ -48,7 +48,7 @@ class DuplicateCount {

/**
 * Count the number of unique reads, duplicates, and the average depth of unique reads and duplicates at all positions.
 * @author aaron
 * @author Mark DePristo
 */
public class CountDuplicatesWalker extends DuplicateWalker<DuplicateCount, DuplicateCount> {
    @Argument(fullName="quiet", required=false, doc="If true, per-locus information isn't printed")
@@ -2,109 +2,21 @@ package org.broadinstitute.sting.utils.duplicates;

import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.oneoffprojects.multisamplecaller.BasicPileup;

import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;

public class DupUtils {
    public static boolean usableDuplicate( SAMRecord read1, SAMRecord read2 ) {
        return read1 != read2 && read1.getReadLength() == read2.getReadLength();
    }

    public static Pair<SAMRecord, SAMRecord> combinedReadPair( List<SAMRecord> duplicateReads ) {
        if ( duplicateReads.size() < 4 )
            return null;

        SAMRecord c1 = combine2Duplicates(duplicateReads.get(0),duplicateReads.get(1));
        SAMRecord c2 = combine2Duplicates(duplicateReads.get(2),duplicateReads.get(3));
        return new Pair<SAMRecord, SAMRecord>(c1, c2);
    }

    public static SAMRecord sample3rdRead( List<SAMRecord> duplicateReads, SAMRecord read1, SAMRecord read2 ) {
        if ( duplicateReads.size() <= 2 ) {
            // no third unique read is available
            return null;
        } else {
            for ( SAMRecord read3 : duplicateReads ) {
                if ( usableDuplicate(read1, read3) && usableDuplicate(read2, read3) )
                    return read3;
            }

            return null;
        }
    }

    public static SAMRecord tmpCopyRead(SAMRecord read) {
        SAMRecord copy = new SAMRecord(read.getHeader());
        copy.setReadName(read.getReadName());
        //copy.setReadString(final String value) {
        copy.setReadBases(read.getReadBases());
        copy.setBaseQualities(read.getBaseQualities());
        copy.setReferenceName(read.getReferenceName());
        copy.setReferenceIndex(read.getReferenceIndex());
        copy.setMateReferenceName(read.getMateReferenceName());
        copy.setMateReferenceIndex(read.getMateReferenceIndex());
        copy.setAlignmentStart(read.getAlignmentStart());
        //copy.setAlignmentEnd(read.getAlignmentEnd());
        copy.setMateAlignmentStart(read.getMateAlignmentStart());
        copy.setInferredInsertSize(read.getInferredInsertSize());
        copy.setMappingQuality(read.getMappingQuality());
        copy.setCigar(read.getCigar());
        copy.setFlags(read.getFlags());

        return copy;
    }

    public static SAMRecord combine2Duplicates(SAMRecord read1, SAMRecord read2) {
        byte[] read1Bases = read1.getReadBases();
        byte[] read1Quals = read1.getBaseQualities();
        byte[] read2Bases = read2.getReadBases();
        byte[] read2Quals = read2.getBaseQualities();

        byte[] bases = new byte[read1Bases.length];
        byte[] quals = new byte[read1Bases.length];

        SAMRecord c = tmpCopyRead(read1);
        for ( int i = 0; i < read1Bases.length; i++) {
            byte base1 = read1Bases[i];
            byte base2 = read2Bases[i];
            byte qual1 = read1Quals[i];
            byte qual2 = read2Quals[i];

            Pair<Byte, Integer> combined = combine2BasesAndQuals(base1, base2, qual1, qual2);

            bases[i] = combined.getFirst();
            quals[i] = QualityUtils.boundQual(combined.getSecond());

            // if ( DEBUG )
            //     logger.debug(String.format("Combining %s (Q%2d) with %s (Q%2d) -> %s (Q%2d)%s%n",
            //                  (char)base1, qual1, (char)base2, qual2, (char)bases[i], quals[i],
            //                  base1 == base2 ? "" : " [MISMATCH]"));
    private static SAMRecord tmpCopyRead(SAMRecord read) {
        try {
            return (SAMRecord)read.clone();
        } catch ( CloneNotSupportedException e ) {
            throw new StingException("Unexpected Clone failure!");
        }
        c.setReadBases(bases);
        c.setBaseQualities(quals);

        return c;
    }

    public static Pair<Byte, Integer> combine2BasesAndQuals(byte base1, byte base2, int qual1, int qual2) {
        byte cbase;
        int cqual;

        if ( base1 == base2 ) {
            // agreement
            cbase = base1;
            cqual = qual1 + qual2;
        } else {
            // disagreement
            cbase = qual1 > qual2 ? base1 : base2;
            //cqual = Math.max(qual1, qual2);
            cqual = Math.max(qual1, qual2) - Math.min(qual1, qual2);

        }

        return new Pair<Byte, Integer>(cbase, cqual);
    }

    public static SAMRecord combineDuplicates(List<SAMRecord> duplicates, int maxQScore) {
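The pairwise rule in the (now removed) combine2BasesAndQuals is simple Phred arithmetic: agreeing bases add their quality scores, disagreeing bases keep the higher-quality base at the difference of the two scores. A standalone restatement (plain Java; CombinedBase is a local stand-in for the Pair type, not GATK API):

    class CombinedBase {
        final byte base;
        final int qual;
        CombinedBase(byte base, int qual) { this.base = base; this.qual = qual; }
    }

    class PairwiseCombineSketch {
        static CombinedBase combine(byte base1, byte base2, int qual1, int qual2) {
            if (base1 == base2)
                return new CombinedBase(base1, qual1 + qual2);        // agreement: evidence adds
            byte winner = qual1 > qual2 ? base1 : base2;              // disagreement: better call wins
            return new CombinedBase(winner, Math.abs(qual1 - qual2)); // at the score difference
        }
    }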
@@ -114,6 +26,7 @@ public class DupUtils {
        // make the combined read by copying the first read and setting the
        // bases and quals to new arrays
        SAMRecord comb = tmpCopyRead(duplicates.get(0));
        //SAMRecord comb = tmpCopyRead(duplicates.get(0));
        comb.setDuplicateReadFlag(false);
        int readLen = comb.getReadBases().length;
        byte[] bases = new byte[readLen];
@@ -125,8 +38,6 @@ public class DupUtils {
            //    System.out.printf("dup base %c %d%n", (char)read.getReadBases()[i], read.getBaseQualities()[i]);
            //}
            Pair<Byte, Byte> baseAndQual = combineBaseProbs(duplicates, i, maxQScore);
//            baseAndQual = combineBasesByConsensus(duplicates, i);
//            baseAndQual = combineDupBasesAtI(duplicates, i);
            bases[i] = baseAndQual.getFirst();
            quals[i] = baseAndQual.getSecond();
        }
@@ -142,6 +53,7 @@ public class DupUtils {
        char bestBase = 0;
        double bestProb = Double.NEGATIVE_INFINITY;
        double sumProbs = 0;

        for ( int i = 0; i < 4; i++ ) {
            sumProbs += Math.pow(10, probs[i]);
            //System.out.printf("Bestprob is %f > %f%n", bestProb, probs[i]);
@@ -150,20 +62,20 @@ public class DupUtils {
                bestProb = probs[i];
            }
        }

        Arrays.sort(probs);
        double normalizedP = Math.pow(10, bestProb) / sumProbs;
        double normalizedQ = 1 - normalizedP;
        double eps = Math.pow(10, -maxQScore/10.0);
        byte qual = QualityUtils.probToQual(normalizedP, eps);
        if ( false ) {
            System.out.printf("Best base is %s %.8f%n", bestBase, bestProb);
            System.out.printf("2nd base is %.8f%n", probs[1]);
            System.out.printf("normalized P %.8f%n", normalizedP);
            System.out.printf("normalized Q %.8f%n", 1 - normalizedP);
            System.out.printf("max Q %2d%n", maxQScore);
            System.out.printf("eps %.8f%n", eps);
            System.out.printf("encoded Q %2d%n", qual);
        }
//        if ( false ) {
//            System.out.printf("Best base is %s %.8f%n", bestBase, bestProb);
//            System.out.printf("2nd base is %.8f%n", probs[1]);
//            System.out.printf("normalized P %.8f%n", normalizedP);
//            System.out.printf("normalized Q %.8f%n", 1 - normalizedP);
//            System.out.printf("max Q %2d%n", maxQScore);
//            System.out.printf("eps %.8f%n", eps);
//            System.out.printf("encoded Q %2d%n", qual);
//        }

        return new Pair<Byte, Byte>((byte)bestBase, qual);
    }
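The normalization above turns per-base log10 likelihood sums into a consensus quality: the winning base's likelihood is normalized against the sum over all four bases, and the error estimate is floored by eps = 10^(-maxQ/10). A standalone restatement (assumption: the eps floor acts as a minimum error rate, mirroring how probToQual(p, eps) is used here):

    // probs holds log10 likelihood sums for A, C, G, T.
    static int consensusQual(double[] probs, int maxQ) {
        double best = Double.NEGATIVE_INFINITY, sum = 0;
        for (double p : probs) {
            sum += Math.pow(10, p);
            best = Math.max(best, p);
        }
        double normalizedP = Math.pow(10, best) / sum;  // posterior of the best base
        double eps = Math.pow(10, -maxQ / 10.0);        // background error floor
        double err = Math.max(1 - normalizedP, eps);
        return (int) Math.round(-10 * Math.log10(err));
    }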
@@ -176,93 +88,31 @@ public class DupUtils {
        System.out.printf("%n");
    }

    private static List<Integer> constantOffset( List<SAMRecord> reads, int i ) {
        List<Integer> l = new ArrayList<Integer>(reads.size());
        for ( SAMRecord read : reads ) {
            l.add(i);
        }
        return l;
    }

    // TODO -- get rid of all this crappy, obsolete pileup code

    @Deprecated
    private static ArrayList<Byte> getBasesAsArrayList( List<SAMRecord> reads, List<Integer> offsets ) {
        ArrayList<Byte> bases = new ArrayList<Byte>(reads.size());
        for (byte value : getBasesAsArray(reads, offsets))
            bases.add(value);
        return bases;
    }

    @Deprecated
    private static ArrayList<Byte> getQualsAsArrayList( List<SAMRecord> reads, List<Integer> offsets ) {
        ArrayList<Byte> quals = new ArrayList<Byte>(reads.size());
        for (byte value : getQualsAsArray(reads, offsets))
            quals.add(value);
        return quals;
    }

    @Deprecated
    public static byte[] getBasesAsArray( List<SAMRecord> reads, List<Integer> offsets ) {
        byte array[] = new byte[reads.size()];
        int index = 0;
        for ( int i = 0; i < reads.size(); i++ ) {
            SAMRecord read = reads.get(i);
            int offset = offsets.get(i);
            if ( offset == -1 ) {
                array[index++] = ((byte)'D');
            } else {
                array[index++] = read.getReadBases()[offset];
            }
        }
        return array;
    }

    @Deprecated
    private static byte[] getQualsAsArray( List<SAMRecord> reads, List<Integer> offsets ) {
        byte array[] = new byte[reads.size()];
        int index = 0;
        for ( int i = 0; i < reads.size(); i++ ) {
            SAMRecord read = reads.get(i);
            int offset = offsets.get(i);

            // skip deletion sites
            if ( offset == -1 ) {
                array[index++] = ((byte)0);
            } else {
                array[index++] = read.getBaseQualities()[offset];
            }
        }
        return array;
    }

    private static Pair<Byte, Byte> combineBaseProbs(List<SAMRecord> duplicates, int readOffset, int maxQScore) {
        List<Integer> offsets = constantOffset(duplicates, readOffset);
        ArrayList<Byte> bases = getBasesAsArrayList(duplicates, offsets);
        ArrayList<Byte> quals = getQualsAsArrayList(duplicates, offsets);
        GenomeLoc loc = GenomeLocParser.createGenomeLoc(duplicates.get(0));
        ReadBackedPileup pileup = new ReadBackedPileup(loc, duplicates, readOffset);

        final boolean debug = false;

        // calculate base probs
        double[] qualSums = {0.0, 0.0, 0.0, 0.0};
        if ( debug ) print4BaseQuals("start", qualSums);
        for ( int i = 0; i < bases.size(); i++ ) {
            char base = (char)(byte)bases.get(i);
            int baseIndex = BaseUtils.simpleBaseToBaseIndex(base);
            byte qual = quals.get(i);

        for (PileupElement e : pileup ) {
            int baseIndex = e.getBaseIndex();
            byte qual = e.getQual();
            double pqual = QualityUtils.qualToProb(qual);
            for ( int j = 0; j < 4; j++) {
                qualSums[j] += Math.log10(j == baseIndex ? pqual : (1 - pqual)/3);
            }
            if ( debug ) print4BaseQuals(String.format("%c Q%2d", base, qual), qualSums);

            if ( debug ) print4BaseQuals(String.format("%c Q%2d", e.getBase(), qual), qualSums);
        }
        if ( debug ) print4BaseQuals("final", qualSums);

        Pair<Byte, Byte> combined = baseProbs2BaseAndQual(qualSums, maxQScore);
//        if ( debug )
//            System.out.printf("%s %s => %c Q%s%n",
//                    BasicPileup.basePileupAsString(duplicates, offsets),
//                    BasicPileup.qualPileupAsString(duplicates, offsets),
//                    (char)(byte)combined.getFirst(), combined.getSecond());
        if ( debug ) System.out.printf("%s => %c Q%s%n", pileup.getPileupString('N'), (char)(byte)combined.getFirst(), combined.getSecond());

        return combined;
    }
}
@@ -39,6 +39,10 @@ public class ReadBackedPileup implements Iterable<PileupElement> {
        this(loc, readsOffsets2Pileup(reads, offsets));
    }

    public ReadBackedPileup(GenomeLoc loc, List<SAMRecord> reads, int offset ) {
        this(loc, readsOffsets2Pileup(reads, offset));
    }

    /**
     * Create a new version of a read backed pileup at loc without any aligned reads
     *
@@ -122,6 +126,25 @@ public class ReadBackedPileup implements Iterable<PileupElement> {
        return pileup;
    }

    /**
     * Helper routine for converting reads and a single offset to a PileupElement list.
     *
     * @param reads  the reads to convert
     * @param offset the single offset applied to every read
     * @return a list of PileupElements, one per read
     */
    private static ArrayList<PileupElement> readsOffsets2Pileup(List<SAMRecord> reads, int offset ) {
        if ( reads == null ) throw new StingException("Illegal null read list in ReadBackedPileup2");
        if ( offset < 0 ) throw new StingException("Illegal offset < 0 ReadBackedPileup2");

        ArrayList<PileupElement> pileup = new ArrayList<PileupElement>(reads.size());
        for ( int i = 0; i < reads.size(); i++ ) {
            pileup.add( new PileupElement( reads.get(i), offset ) );
        }

        return pileup;
    }

    // --------------------------------------------------------
    //
    // Special 'constructors'
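The new single-offset constructor is what the rewritten DupUtils.combineBaseProbs uses: every duplicate contributes the base at the same cycle. A usage sketch (duplicates and readOffset assumed in scope, as in combineBaseProbs above):

    GenomeLoc loc = GenomeLocParser.createGenomeLoc(duplicates.get(0));
    ReadBackedPileup pileup = new ReadBackedPileup(loc, duplicates, readOffset);
    for (PileupElement e : pileup)
        System.out.printf("%c Q%d%n", (char) e.getBase(), e.getQual());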
@@ -8,7 +8,7 @@ import java.util.Arrays;
import java.util.List;

public class DuplicatesWalkersIntegrationTest extends WalkerTest {
    public void testClipper(String name, String args, String md5) {
    public void testCounter(String name, String args, String md5) {
        WalkerTestSpec spec = new WalkerTestSpec(
                "-T CountDuplicates " +
                "-R /seq/references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta " +
@@ -20,6 +20,21 @@ public class DuplicatesWalkersIntegrationTest extends WalkerTest {
        List<File> result = executeTest(name, spec).getFirst();
    }

    @Test public void testChr110Mb() { testClipper("testChr1-10mb", "-L chr1:1-10,000,000 --quiet", ""); }
    @Test public void testIntervalVerbose() { testClipper("testIntervalVerbose", "-L chr1:6,527,154-6,528,292", ""); }
    @Test public void testChr110Mb() { testCounter("testChr1-10mb", "-L chr1:1-10,000,000 --quiet", "fa8bfdd0b62a13a543bae90f7c674db7"); }
    @Test public void testIntervalVerbose() { testCounter("testIntervalVerbose", "-L chr1:6,527,154-6,528,292", "1ebcc10b85af16805a54391721776657"); }

    public void testCombiner(String name, String args, String md51, String md52) {
        WalkerTestSpec spec = new WalkerTestSpec(
                "-T CombineDuplicates " +
                "-R /seq/references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta " +
                "-I /humgen/gsa-hpprojects/GATK/data/Validation_Data/TCGA-06-0188.aligned.duplicates_marked.bam " +
                "-o %s --outputBAM %s " + args,
                2, // two output files: the text output and the BAM
                Arrays.asList("tmp", "bam"),
                Arrays.asList(md51, md52));
        List<File> result = executeTest(name, spec).getFirst();
    }

    @Test public void testIntervalCombine() { testCombiner("testIntervalCombine", "-L chr1:6,527,154-6,528,292 -maxQ 50", "d41d8cd98f00b204e9800998ecf8427e", "bbde777437fcf4386687e0d11547d0f3"); }
    @Test public void testIntervalCombineQ60() { testCombiner("testIntervalCombine", "-L chr1:6,527,154-6,528,292 -maxQ 60", "d41d8cd98f00b204e9800998ecf8427e", "93993440eb0455208c7f9881d1115a3c"); }
}