Refactor the Pileup Element with regards to indels

Eric reported this bug due to the reduced reads failing with an index out of bounds on what we thought was a deletion, but turned out to be a read starting with an insertion.

   * Refactored PileupElement to distinguish clearly between deletions and read starting with insertion
   * Modified ExtendedEventPileup to correctly distinguish elements with deletion when creating new pileups
   * Refactored most of the lazyLoadNextAlignment() function of the LocusIteratorByState for clarity and to create clear separation between what is a pileup with a deletion and what's not one. Got rid of many useless if statements.
   * Changed the way LocusIteratorByState creates extended event pileups to differentiate between insertions in the beginning of the read and deletions.
   * Every deletion now has an offset (start of the event)
   * Fixed bug when LocusIteratorByState found a read starting with an insertion that happened to be a reduced read.
   * Separated the definitions of deletion/insertion (in the beginning of the read) in all UG annotations (and the annotator engine).
   * Pileup depth of coverage for a deleted base will now return the average coverage around the deletion.
   * Indel ReadPositionRankSum test now uses the deletion true offset from the read, changed all appropriate md5's
   * The extra pileup elements now properly read by the Indel mode of the UG made any subsequent call have a different random number and therefore all RankSum tests have slightly different values (in the 10^-3 range). Updated all appropriate md5s after extremely careful inspection -- Thanks Ryan!

 phew!
This commit is contained in:
Mauricio Carneiro 2012-01-17 18:56:50 -05:00
parent 4aacaf8916
commit ffd61f4c1c
20 changed files with 1788 additions and 1612 deletions

View File

@ -23,7 +23,7 @@ import java.util.NoSuchElementException;
*/ */
/** /**
* A LocusView over which the user can iterate. * A LocusView over which the user can iterate.
*/ */
public class AllLocusView extends LocusView { public class AllLocusView extends LocusView {
@ -47,12 +47,13 @@ public class AllLocusView extends LocusView {
/** /**
* Create a new queue of locus contexts. * Create a new queue of locus contexts.
*
* @param provider * @param provider
*/ */
public AllLocusView(LocusShardDataProvider provider) { public AllLocusView(LocusShardDataProvider provider) {
super( provider ); super(provider);
// Seed the state tracking members with the first possible seek position and the first possible locus context. // Seed the state tracking members with the first possible seek position and the first possible locus context.
locusIterator = new GenomeLocusIterator(genomeLocParser,provider.getLocus()); locusIterator = new GenomeLocusIterator(genomeLocParser, provider.getLocus());
} }
public boolean hasNext() { public boolean hasNext() {
@ -63,7 +64,7 @@ public class AllLocusView extends LocusView {
public AlignmentContext next() { public AlignmentContext next() {
advance(); advance();
if(nextPosition == null) if (nextPosition == null)
throw new NoSuchElementException("No next is available in the all locus view"); throw new NoSuchElementException("No next is available in the all locus view");
// Flag to the iterator that no data is waiting in the queue to be processed. // Flag to the iterator that no data is waiting in the queue to be processed.
@ -72,7 +73,7 @@ public class AllLocusView extends LocusView {
AlignmentContext currentLocus; AlignmentContext currentLocus;
// If actual data is present, return it. Otherwise, return empty data. // If actual data is present, return it. Otherwise, return empty data.
if( nextLocus != null && nextLocus.getLocation().equals(nextPosition) ) if (nextLocus != null && nextLocus.getLocation().equals(nextPosition))
currentLocus = nextLocus; currentLocus = nextLocus;
else else
currentLocus = createEmptyLocus(nextPosition); currentLocus = createEmptyLocus(nextPosition);
@ -82,15 +83,15 @@ public class AllLocusView extends LocusView {
private void advance() { private void advance() {
// Already at the next element? Don't move forward. // Already at the next element? Don't move forward.
if(atNextElement) if (atNextElement)
return; return;
// Out of elements? // Out of elements?
if(nextPosition == null && !locusIterator.hasNext()) if (nextPosition == null && !locusIterator.hasNext())
return; return;
// If nextLocus has been consumed, clear it out to make room for the next incoming locus. // If nextLocus has been consumed, clear it out to make room for the next incoming locus.
if(nextPosition != null && nextLocus != null && !nextLocus.getLocation().isPast(nextPosition)) { if (nextPosition != null && nextLocus != null && !nextLocus.getLocation().isPast(nextPosition)) {
nextLocus = null; nextLocus = null;
// Determine the next locus. The trick is that we may have more than one alignment context at the same // Determine the next locus. The trick is that we may have more than one alignment context at the same
@ -98,9 +99,9 @@ public class AllLocusView extends LocusView {
// is still at the current position, we do not increment current position and wait for next call to next() to return // is still at the current position, we do not increment current position and wait for next call to next() to return
// that context. If we know that next context is past the current position, we are done with current // that context. If we know that next context is past the current position, we are done with current
// position // position
if(hasNextLocus()) { if (hasNextLocus()) {
nextLocus = nextLocus(); nextLocus = nextLocus();
if(nextPosition.equals(nextLocus.getLocation())) { if (nextPosition.equals(nextLocus.getLocation())) {
atNextElement = true; atNextElement = true;
return; return;
} }
@ -108,7 +109,7 @@ public class AllLocusView extends LocusView {
} }
// No elements left in queue? Clear out the position state tracker and return. // No elements left in queue? Clear out the position state tracker and return.
if(!locusIterator.hasNext()) { if (!locusIterator.hasNext()) {
nextPosition = null; nextPosition = null;
return; return;
} }
@ -119,9 +120,9 @@ public class AllLocusView extends LocusView {
// Crank the iterator to (if possible) or past the next context. Be careful not to hold a reference to nextLocus // Crank the iterator to (if possible) or past the next context. Be careful not to hold a reference to nextLocus
// while using the hasNextLocus() / nextLocus() machinery; this will cause us to use more memory than is optimal. // while using the hasNextLocus() / nextLocus() machinery; this will cause us to use more memory than is optimal.
while(nextLocus == null || nextLocus.getLocation().isBefore(nextPosition)) { while (nextLocus == null || nextLocus.getLocation().isBefore(nextPosition)) {
nextLocus = null; nextLocus = null;
if(!hasNextLocus()) if (!hasNextLocus())
break; break;
nextLocus = nextLocus(); nextLocus = nextLocus();
} }
@ -129,12 +130,15 @@ public class AllLocusView extends LocusView {
/** /**
* Creates a blank locus context at the specified location. * Creates a blank locus context at the specified location.
*
* @param site Site at which to create the blank locus context. * @param site Site at which to create the blank locus context.
* @return empty context. * @return empty context.
*/ */
private final static List<GATKSAMRecord> EMPTY_PILEUP_READS = Collections.emptyList(); private final static List<GATKSAMRecord> EMPTY_PILEUP_READS = Collections.emptyList();
private final static List<Integer> EMPTY_PILEUP_OFFSETS = Collections.emptyList(); private final static List<Integer> EMPTY_PILEUP_OFFSETS = Collections.emptyList();
private AlignmentContext createEmptyLocus( GenomeLoc site ) { private final static List<Boolean> EMPTY_DELETION_STATUS = Collections.emptyList();
return new AlignmentContext(site,new ReadBackedPileupImpl(site, EMPTY_PILEUP_READS, EMPTY_PILEUP_OFFSETS));
private AlignmentContext createEmptyLocus(GenomeLoc site) {
return new AlignmentContext(site, new ReadBackedPileupImpl(site, EMPTY_PILEUP_READS, EMPTY_PILEUP_OFFSETS));
} }
} }

View File

@ -49,9 +49,13 @@ import org.broadinstitute.sting.utils.sam.ReadUtils;
import java.util.*; import java.util.*;
/** Iterator that traverses a SAM File, accumulating information on a per-locus basis */ /**
* Iterator that traverses a SAM File, accumulating information on a per-locus basis
*/
public class LocusIteratorByState extends LocusIterator { public class LocusIteratorByState extends LocusIterator {
/** our log, which we want to capture anything from this class */ /**
* our log, which we want to capture anything from this class
*/
private static Logger logger = Logger.getLogger(LocusIteratorByState.class); private static Logger logger = Logger.getLogger(LocusIteratorByState.class);
// ----------------------------------------------------------------------------------------------------------------- // -----------------------------------------------------------------------------------------------------------------
@ -92,12 +96,14 @@ public class LocusIteratorByState extends LocusIterator {
boolean generateExtendedEvents = true; // should we generate an additional, special pile for indels between the ref bases? boolean generateExtendedEvents = true; // should we generate an additional, special pile for indels between the ref bases?
// the only purpose of this flag is to shield away a few additional lines of code // the only purpose of this flag is to shield away a few additional lines of code
// when extended piles are not needed, it may not be even worth it... // when extended piles are not needed, it may not be even worth it...
byte[] insertedBases = null; // remember full inserted sequence if we are generating piles of extended events (indels)
int eventLength = -1; // will be set to the length of insertion/deletion if we are generating piles of extended events byte[] insertedBases = null; // remember full inserted sequence if we are generating piles of extended events (indels)
byte eventDelayedFlag = 0; // will be set to non-0 if there was an event (indel) right before the int eventLength = -1; // will be set to the length of insertion/deletion if we are generating piles of extended events
byte eventDelayedFlag = 0; // will be set to non-0 if there was an event (indel) right before the
// current base on the ref. We use a counter-like variable here since clearing the indel event is // current base on the ref. We use a counter-like variable here since clearing the indel event is
// delayed by one base, so we need to remember how long ago we have seen the actual event // delayed by one base, so we need to remember how long ago we have seen the actual event
int eventStart = -1; // where on the read the extended event starts (i.e. the last position on the read prior to the
int eventStart = -1; // where on the read the extended event starts (i.e. the last position on the read prior to the
// event, or -1 if alignment starts with an insertion); this one is easy to recompute on the fly, // event, or -1 if alignment starts with an insertion); this one is easy to recompute on the fly,
// we cache it here mainly for convenience // we cache it here mainly for convenience
@ -111,23 +117,31 @@ public class LocusIteratorByState extends LocusIterator {
//System.out.printf("Creating a SAMRecordState: %s%n", this); //System.out.printf("Creating a SAMRecordState: %s%n", this);
} }
public SAMRecord getRead() { return read; } public SAMRecord getRead() {
return read;
}
/** /**
* What is our current offset in the read's bases that aligns us with the reference genome? * What is our current offset in the read's bases that aligns us with the reference genome?
* *
* @return * @return
*/ */
public int getReadOffset() { return readOffset; } public int getReadOffset() {
return readOffset;
}
/** /**
* What is the current offset w.r.t. the alignment state that aligns us to the readOffset? * What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
* *
* @return * @return
*/ */
public int getGenomeOffset() { return genomeOffset; } public int getGenomeOffset() {
return genomeOffset;
}
public int getGenomePosition() { return read.getAlignmentStart() + getGenomeOffset(); } public int getGenomePosition() {
return read.getAlignmentStart() + getGenomeOffset();
}
public GenomeLoc getLocation(GenomeLocParser genomeLocParser) { public GenomeLoc getLocation(GenomeLocParser genomeLocParser) {
return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition()); return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
@ -137,19 +151,26 @@ public class LocusIteratorByState extends LocusIterator {
return curElement.getOperator(); return curElement.getOperator();
} }
/** Returns true if we just stepped over insertion/into a deletion prior to the last return from stepForwardOnGenome. /**
* Returns true if we just stepped over insertion/into a deletion prior to the last return from stepForwardOnGenome.
* *
* @return * @return
*/ */
public boolean hadIndel() { public boolean hadIndel() {
return ( eventLength > 0 ); return (eventLength > 0);
} }
public int getEventLength() { return eventLength; } public int getEventLength() {
return eventLength;
}
public byte[] getEventBases() { return insertedBases; } public byte[] getEventBases() {
return insertedBases;
}
public int getReadEventStartOffset() { return eventStart; } public int getReadEventStartOffset() {
return eventStart;
}
public String toString() { public String toString() {
return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement); return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement);
@ -160,9 +181,9 @@ public class LocusIteratorByState extends LocusIterator {
// (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion // (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion
if ( curElement == null || ++cigarElementCounter > curElement.getLength() ) { if (curElement == null || ++cigarElementCounter > curElement.getLength()) {
cigarOffset++; cigarOffset++;
if ( cigarOffset < nCigarElements ) { if (cigarOffset < nCigarElements) {
curElement = cigar.getCigarElement(cigarOffset); curElement = cigar.getCigarElement(cigarOffset);
cigarElementCounter = 0; cigarElementCounter = 0;
// next line: guards against cigar elements of length 0; when new cigar element is retrieved, // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
@ -174,15 +195,15 @@ public class LocusIteratorByState extends LocusIterator {
// current offset of this read is the next ref base after the end of the indel. This position will // current offset of this read is the next ref base after the end of the indel. This position will
// model a point on the reference somewhere after the end of the read. // model a point on the reference somewhere after the end of the read.
genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here: genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:
// we do step forward on the ref, and by returning null we also indicate that we are past the read end. // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
if ( generateExtendedEvents && eventDelayedFlag > 0 ) { if (generateExtendedEvents && eventDelayedFlag > 0) {
// if we had an indel right before the read ended (i.e. insertion was the last cigar element), // if we had an indel right before the read ended (i.e. insertion was the last cigar element),
// we keep it until next reference base; then we discard it and this will allow the LocusIterator to // we keep it until next reference base; then we discard it and this will allow the LocusIterator to
// finally discard this read // finally discard this read
eventDelayedFlag--; eventDelayedFlag--;
if ( eventDelayedFlag == 0 ) { if (eventDelayedFlag == 0) {
eventLength = -1; // reset event when we are past it eventLength = -1; // reset event when we are past it
insertedBases = null; insertedBases = null;
eventStart = -1; eventStart = -1;
@ -193,34 +214,35 @@ public class LocusIteratorByState extends LocusIterator {
} }
} }
boolean done = false; boolean done = false;
switch (curElement.getOperator()) { switch (curElement.getOperator()) {
case H : // ignore hard clips case H: // ignore hard clips
case P : // ignore pads case P: // ignore pads
cigarElementCounter = curElement.getLength(); cigarElementCounter = curElement.getLength();
break; break;
case I : // insertion w.r.t. the reference case I: // insertion w.r.t. the reference
if ( generateExtendedEvents ) { if (generateExtendedEvents) {
// we see insertions only once, when we step right onto them; the position on the read is scrolled // we see insertions only once, when we step right onto them; the position on the read is scrolled
// past the insertion right after that // past the insertion right after that
if ( eventDelayedFlag > 1 ) throw new UserException.MalformedBAM(read, "Adjacent I/D events in read "+read.getReadName()); if (eventDelayedFlag > 1)
insertedBases = Arrays.copyOfRange(read.getReadBases(),readOffset+1,readOffset+1+curElement.getLength()); throw new UserException.MalformedBAM(read, "Adjacent I/D events in read " + read.getReadName());
eventLength = curElement.getLength() ; insertedBases = Arrays.copyOfRange(read.getReadBases(), readOffset + 1, readOffset + 1 + curElement.getLength());
eventLength = curElement.getLength();
eventStart = readOffset; eventStart = readOffset;
eventDelayedFlag = 2; // insertion causes re-entry into stepForwardOnGenome, so we set the delay to 2 eventDelayedFlag = 2; // insertion causes re-entry into stepForwardOnGenome, so we set the delay to 2
// System.out.println("Inserted "+(new String (insertedBases)) +" after "+readOffset); // System.out.println("Inserted "+(new String (insertedBases)) +" after "+readOffset);
} // continue onto the 'S' case ! } // continue onto the 'S' case !
case S : // soft clip case S: // soft clip
cigarElementCounter = curElement.getLength(); cigarElementCounter = curElement.getLength();
readOffset += curElement.getLength(); readOffset += curElement.getLength();
break; break;
case D : // deletion w.r.t. the reference case D: // deletion w.r.t. the reference
if ( generateExtendedEvents ) { if (generateExtendedEvents) {
if ( cigarElementCounter == 1) { if (cigarElementCounter == 1) {
// generate an extended event only if we just stepped into the deletion (i.e. don't // generate an extended event only if we just stepped into the deletion (i.e. don't
// generate the event at every deleted position on the ref, that's what cigarElementCounter==1 is for!) // generate the event at every deleted position on the ref, that's what cigarElementCounter==1 is for!)
if ( eventDelayedFlag > 1 ) throw new UserException.MalformedBAM(read, "Adjacent I/D events in read "+read.getReadName()); if (eventDelayedFlag > 1)
throw new UserException.MalformedBAM(read, "Adjacent I/D events in read " + read.getReadName());
eventLength = curElement.getLength(); eventLength = curElement.getLength();
eventDelayedFlag = 2; // deletion on the ref causes an immediate return, so we have to delay by 1 only eventDelayedFlag = 2; // deletion on the ref causes an immediate return, so we have to delay by 1 only
eventStart = readOffset; eventStart = readOffset;
@ -232,26 +254,27 @@ public class LocusIteratorByState extends LocusIterator {
genomeOffset++; genomeOffset++;
done = true; done = true;
break; break;
case N : // reference skip (looks and gets processed just like a "deletion", just different logical meaning) case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
genomeOffset++; genomeOffset++;
done = true; done = true;
break; break;
case M : case M:
readOffset++; readOffset++;
genomeOffset++; genomeOffset++;
done = true; done = true;
break; break;
default : throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator()); default:
throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator());
} }
if ( generateExtendedEvents ) { if (generateExtendedEvents) {
if ( eventDelayedFlag > 0 && done ) { if (eventDelayedFlag > 0 && done) {
// if we did make a successful step on the ref, decrement delayed flag. If, upon the decrementthe, // if we did make a successful step on the ref, decrement delayed flag. If, upon the decrementing the,
// the flag is 1, we are standing on the reference base right after the indel (so we have to keep it). // the flag is 1, we are standing on the reference base right after the indel (so we have to keep it).
// Otherwise, we are away from the previous indel and have to clear our memories... // Otherwise, we are away from the previous indel and have to clear our memories...
eventDelayedFlag--; // when we notice an indel, we set delayed flag to 2, so now eventDelayedFlag--; // when we notice an indel, we set delayed flag to 2, so now
// if eventDelayedFlag == 1, an indel occured right before the current base // if eventDelayedFlag == 1, an indel occured right before the current base
if ( eventDelayedFlag == 0 ) { if (eventDelayedFlag == 0) {
eventLength = -1; // reset event when we are past it eventLength = -1; // reset event when we are past it
insertedBases = null; insertedBases = null;
eventStart = -1; eventStart = -1;
@ -274,15 +297,15 @@ public class LocusIteratorByState extends LocusIterator {
// //
// ----------------------------------------------------------------------------------------------------------------- // -----------------------------------------------------------------------------------------------------------------
public LocusIteratorByState(final Iterator<SAMRecord> samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection<String> samples ) { public LocusIteratorByState(final Iterator<SAMRecord> samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection<String> samples) {
this.readInfo = readInformation; this.readInfo = readInformation;
this.genomeLocParser = genomeLocParser; this.genomeLocParser = genomeLocParser;
this.samples = new ArrayList<String>(samples); this.samples = new ArrayList<String>(samples);
this.readStates = new ReadStateManager(samIterator,readInformation.getDownsamplingMethod()); this.readStates = new ReadStateManager(samIterator, readInformation.getDownsamplingMethod());
// currently the GATK expects this LocusIteratorByState to accept empty sample lists, when // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
// there's no read data. So we need to throw this error only when samIterator.hasNext() is true // there's no read data. So we need to throw this error only when samIterator.hasNext() is true
if ( this.samples.isEmpty() && samIterator.hasNext() ) { if (this.samples.isEmpty() && samIterator.hasNext()) {
throw new IllegalArgumentException("samples list must not be empty"); throw new IllegalArgumentException("samples list must not be empty");
} }
} }
@ -322,7 +345,7 @@ public class LocusIteratorByState extends LocusIterator {
// ----------------------------------------------------------------------------------------------------------------- // -----------------------------------------------------------------------------------------------------------------
public AlignmentContext next() { public AlignmentContext next() {
lazyLoadNextAlignmentContext(); lazyLoadNextAlignmentContext();
if(!hasNext()) if (!hasNext())
throw new NoSuchElementException("LocusIteratorByState: out of elements."); throw new NoSuchElementException("LocusIteratorByState: out of elements.");
AlignmentContext currentAlignmentContext = nextAlignmentContext; AlignmentContext currentAlignmentContext = nextAlignmentContext;
nextAlignmentContext = null; nextAlignmentContext = null;
@ -334,7 +357,7 @@ public class LocusIteratorByState extends LocusIterator {
* nextAlignmentContext MUST BE null in order for this method to advance to the next entry. * nextAlignmentContext MUST BE null in order for this method to advance to the next entry.
*/ */
private void lazyLoadNextAlignmentContext() { private void lazyLoadNextAlignmentContext() {
while(nextAlignmentContext == null && readStates.hasNext()) { while (nextAlignmentContext == null && readStates.hasNext()) {
// this call will set hasExtendedEvents to true if it picks up a read with indel right before the current position on the ref: // this call will set hasExtendedEvents to true if it picks up a read with indel right before the current position on the ref:
readStates.collectPendingReads(); readStates.collectPendingReads();
@ -350,14 +373,14 @@ public class LocusIteratorByState extends LocusIterator {
// In this case, the subsequent call to next() will emit the normal pileup at the current base // In this case, the subsequent call to next() will emit the normal pileup at the current base
// and shift the position. // and shift the position.
if (readInfo.generateExtendedEvents() && hasExtendedEvents) { if (readInfo.generateExtendedEvents() && hasExtendedEvents) {
Map<String,ReadBackedExtendedEventPileupImpl> fullExtendedEventPileup = new HashMap<String,ReadBackedExtendedEventPileupImpl>(); Map<String, ReadBackedExtendedEventPileupImpl> fullExtendedEventPileup = new HashMap<String, ReadBackedExtendedEventPileupImpl>();
// get current location on the reference and decrement it by 1: the indels we just stepped over // get current location on the reference and decrement it by 1: the indels we just stepped over
// are associated with the *previous* reference base // are associated with the *previous* reference base
GenomeLoc loc = genomeLocParser.incPos(getLocation(),-1); GenomeLoc loc = genomeLocParser.incPos(getLocation(), -1);
boolean hasBeenSampled = false; boolean hasBeenSampled = false;
for(final String sample: samples) { for (final String sample : samples) {
Iterator<SAMRecordState> iterator = readStates.iterator(sample); Iterator<SAMRecordState> iterator = readStates.iterator(sample);
List<ExtendedEventPileupElement> indelPile = new ArrayList<ExtendedEventPileupElement>(readStates.size(sample)); List<ExtendedEventPileupElement> indelPile = new ArrayList<ExtendedEventPileupElement>(readStates.size(sample));
hasBeenSampled |= loc.getStart() <= readStates.getDownsamplingExtent(sample); hasBeenSampled |= loc.getStart() <= readStates.getDownsamplingExtent(sample);
@ -368,103 +391,108 @@ public class LocusIteratorByState extends LocusIterator {
nMQ0Reads = 0; nMQ0Reads = 0;
int maxDeletionLength = 0; int maxDeletionLength = 0;
while(iterator.hasNext()) { while (iterator.hasNext()) {
SAMRecordState state = iterator.next(); final SAMRecordState state = iterator.next();
if ( state.hadIndel() ) { final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read
final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator
final int readOffset = state.getReadOffset(); // the base offset on this read
final int eventStartOffset = state.getReadEventStartOffset(); // this will be -1 if base is not a deletion, or if base is the first deletion in the event. Otherwise, it will give the last base before the deletion began.
final int eventLength = state.getEventLength();
// if (op != CigarOperator.N) // N's are never added to any pileup
// continue;
//
if (state.hadIndel()) { // this read has an indel associated with the previous position on the ref
size++; size++;
if ( state.getEventBases() == null ) { ExtendedEventPileupElement pileupElement;
if (state.getEventBases() == null) { // Deletion event
nDeletions++; nDeletions++;
maxDeletionLength = Math.max(maxDeletionLength,state.getEventLength()); maxDeletionLength = Math.max(maxDeletionLength, state.getEventLength());
pileupElement = new ExtendedEventPileupElement(read, eventStartOffset, eventLength);
}
else { // Insertion event
nInsertions++;
pileupElement = new ExtendedEventPileupElement(read, eventStartOffset, eventLength, state.getEventBases());
} }
else nInsertions++;
indelPile.add ( new ExtendedEventPileupElement((GATKSAMRecord) state.getRead(), state.getReadEventStartOffset(), state.getEventLength(), state.getEventBases()) );
} else { indelPile.add(pileupElement);
// HACK: The readahead mechanism for LocusIteratorByState will effectively read past the current position
// and add in extra reads that start after this indel. Skip these reads.
// My belief at this moment after empirically looking at read->ref alignment is that, in a cigar string
// like 1I76M, the first insertion is between alignment start-1 and alignment start, so we shouldn't be
// filtering these out.
// TODO: UPDATE! Eric tells me that we *might* want reads adjacent to the pileup in the pileup. Strike this block.
//if(state.getRead().getAlignmentStart() > loc.getStart())
// continue;
if ( state.getCurrentCigarOperator() != CigarOperator.N ) {
// this read has no indel associated with the previous position on the ref;
// we count this read in only if it has actual bases, not N span...
if ( state.getCurrentCigarOperator() != CigarOperator.D || readInfo.includeReadsWithDeletionAtLoci() ) {
// if cigar operator is D but the read has no extended event reported (that's why we ended
// up in this branch), it means that we are currently inside a deletion that started earlier;
// we count such reads (with a longer deletion spanning over a deletion at the previous base we are
// about to report) only if includeReadsWithDeletionAtLoci is true.
size++;
indelPile.add ( new ExtendedEventPileupElement((GATKSAMRecord) state.getRead(), state.getReadOffset()-1, -1) // length=-1 --> noevent
);
}
}
} }
if ( state.getRead().getMappingQuality() == 0 ) {
// this read has no indel associated with the previous position on the ref. Criteria to include in the pileup are:
// we only add reads that are not N's
// we only include deletions to the pileup if the walker requests it
else if ( (op != CigarOperator.N) && (op != CigarOperator.D || readInfo.includeReadsWithDeletionAtLoci())) {
size++;
indelPile.add(new ExtendedEventPileupElement((GATKSAMRecord) state.getRead(), readOffset));
}
if (state.getRead().getMappingQuality() == 0)
nMQ0Reads++; nMQ0Reads++;
}
}
if( indelPile.size() != 0 ) fullExtendedEventPileup.put(sample,new ReadBackedExtendedEventPileupImpl(loc,indelPile,size,maxDeletionLength,nInsertions,nDeletions,nMQ0Reads));
}
hasExtendedEvents = false; // we are done with extended events prior to current ref base
// System.out.println("Indel(s) at "+loc);
// for ( ExtendedEventPileupElement pe : indelPile ) { if ( pe.isIndel() ) System.out.println(" "+pe.toString()); }
nextAlignmentContext = new AlignmentContext(loc, new ReadBackedExtendedEventPileupImpl(loc, fullExtendedEventPileup), hasBeenSampled);
} else {
GenomeLoc location = getLocation();
Map<String,ReadBackedPileupImpl> fullPileup = new HashMap<String,ReadBackedPileupImpl>();
}
if (indelPile.size() != 0)
fullExtendedEventPileup.put(sample, new ReadBackedExtendedEventPileupImpl(loc, indelPile, size, maxDeletionLength, nInsertions, nDeletions, nMQ0Reads));
}
hasExtendedEvents = false; // we are done with extended events prior to current ref base
nextAlignmentContext = new AlignmentContext(loc, new ReadBackedExtendedEventPileupImpl(loc, fullExtendedEventPileup), hasBeenSampled);
}
else { // this is a regular event pileup (not extended)
GenomeLoc location = getLocation();
Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();
boolean hasBeenSampled = false; boolean hasBeenSampled = false;
for(final String sample: samples) { for (final String sample : samples) {
Iterator<SAMRecordState> iterator = readStates.iterator(sample); Iterator<SAMRecordState> iterator = readStates.iterator(sample);
List<PileupElement> pile = new ArrayList<PileupElement>(readStates.size(sample)); List<PileupElement> pile = new ArrayList<PileupElement>(readStates.size(sample));
hasBeenSampled |= location.getStart() <= readStates.getDownsamplingExtent(sample); hasBeenSampled |= location.getStart() <= readStates.getDownsamplingExtent(sample);
size = 0; size = 0; // number of elements in this sample's pileup
nDeletions = 0; nDeletions = 0; // number of deletions in this sample's pileup
nMQ0Reads = 0; nMQ0Reads = 0; // number of MQ0 reads in this sample's pileup (warning: current implementation includes N bases that are MQ0)
while(iterator.hasNext()) { while (iterator.hasNext()) {
SAMRecordState state = iterator.next(); final SAMRecordState state = iterator.next(); // state object with the read/offset information
if ( state.getCurrentCigarOperator() != CigarOperator.D && state.getCurrentCigarOperator() != CigarOperator.N ) { final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read
if ( filterBaseInRead((GATKSAMRecord) state.getRead(), location.getStart()) ) { final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator
//discarded_bases++; final int readOffset = state.getReadOffset(); // the base offset on this read
//printStatus("Adaptor bases", discarded_adaptor_bases); final int eventStartOffset = state.getReadEventStartOffset(); // this will be -1 if base is not a deletion, or if base is the first deletion in the event. Otherwise, it will give the last base before the deletion began.
continue;
} else { if (op == CigarOperator.N) // N's are never added to any pileup
//observed_bases++; continue;
pile.add(new PileupElement((GATKSAMRecord) state.getRead(), state.getReadOffset()));
if (read.getMappingQuality() == 0)
nMQ0Reads++;
if (op == CigarOperator.D) {
if (readInfo.includeReadsWithDeletionAtLoci()) { // only add deletions to the pileup if we are authorized to do so
int leftAlignedStart = (eventStartOffset < 0) ? readOffset : eventStartOffset;
pile.add(new PileupElement(read, leftAlignedStart, true));
size++;
nDeletions++;
}
} else {
if (!filterBaseInRead(read, location.getStart())) {
pile.add(new PileupElement(read, readOffset, false));
size++; size++;
} }
} else if ( readInfo.includeReadsWithDeletionAtLoci() && state.getCurrentCigarOperator() != CigarOperator.N ) {
size++;
pile.add(new PileupElement((GATKSAMRecord) state.getRead(), -1));
nDeletions++;
}
if ( state.getRead().getMappingQuality() == 0 ) {
nMQ0Reads++;
} }
} }
if( pile.size() != 0 ) if (pile.size() != 0) // if this pileup added at least one base, add it to the full pileup
fullPileup.put(sample,new ReadBackedPileupImpl(location,pile,size,nDeletions,nMQ0Reads)); fullPileup.put(sample, new ReadBackedPileupImpl(location, pile, size, nDeletions, nMQ0Reads));
} }
updateReadStates(); // critical - must be called after we get the current state offsets and location updateReadStates(); // critical - must be called after we get the current state offsets and location
// if we got reads with non-D/N over the current position, we are done if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done
if ( !fullPileup.isEmpty() ) nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location,fullPileup),hasBeenSampled); nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), hasBeenSampled);
} }
} }
} }
// fast testing of position // fast testing of position
private boolean readIsPastCurrentPosition(SAMRecord read) { private boolean readIsPastCurrentPosition(SAMRecord read) {
if ( readStates.isEmpty() ) if (readStates.isEmpty())
return false; return false;
else { else {
SAMRecordState state = readStates.getFirst(); SAMRecordState state = readStates.getFirst();
@ -485,20 +513,18 @@ public class LocusIteratorByState extends LocusIterator {
} }
private void updateReadStates() { private void updateReadStates() {
for(final String sample: samples) { for (final String sample : samples) {
Iterator<SAMRecordState> it = readStates.iterator(sample); Iterator<SAMRecordState> it = readStates.iterator(sample);
while ( it.hasNext() ) { while (it.hasNext()) {
SAMRecordState state = it.next(); SAMRecordState state = it.next();
CigarOperator op = state.stepForwardOnGenome(); CigarOperator op = state.stepForwardOnGenome();
if ( state.hadIndel() && readInfo.generateExtendedEvents() ) hasExtendedEvents = true; if (state.hadIndel() && readInfo.generateExtendedEvents())
else { hasExtendedEvents = true;
else if (op == null) {
// we discard the read only when we are past its end AND indel at the end of the read (if any) was // we discard the read only when we are past its end AND indel at the end of the read (if any) was
// already processed. Keeping the read state that retunred null upon stepForwardOnGenome() is safe // already processed. Keeping the read state that retunred null upon stepForwardOnGenome() is safe
// as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag. // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag.
if ( op == null ) { // we've stepped off the end of the object it.remove(); // we've stepped off the end of the object
//if (DEBUG) logger.debug(String.format(" removing read %s at %d", state.getRead().getReadName(), state.getRead().getAlignmentStart()));
it.remove();
}
} }
} }
} }
@ -508,20 +534,20 @@ public class LocusIteratorByState extends LocusIterator {
throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!"); throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
} }
private class ReadStateManager { private class ReadStateManager {
private final PeekableIterator<SAMRecord> iterator; private final PeekableIterator<SAMRecord> iterator;
private final DownsamplingMethod downsamplingMethod; private final DownsamplingMethod downsamplingMethod;
private final SamplePartitioner samplePartitioner; private final SamplePartitioner samplePartitioner;
private final Map<String,PerSampleReadStateManager> readStatesBySample = new HashMap<String,PerSampleReadStateManager>(); private final Map<String, PerSampleReadStateManager> readStatesBySample = new HashMap<String, PerSampleReadStateManager>();
private final int targetCoverage; private final int targetCoverage;
private int totalReadStates = 0; private int totalReadStates = 0;
public ReadStateManager(Iterator<SAMRecord> source, DownsamplingMethod downsamplingMethod) { public ReadStateManager(Iterator<SAMRecord> source, DownsamplingMethod downsamplingMethod) {
this.iterator = new PeekableIterator<SAMRecord>(source); this.iterator = new PeekableIterator<SAMRecord>(source);
this.downsamplingMethod = downsamplingMethod.type != null ? downsamplingMethod : DownsamplingMethod.NONE; this.downsamplingMethod = downsamplingMethod.type != null ? downsamplingMethod : DownsamplingMethod.NONE;
switch(this.downsamplingMethod.type) { switch (this.downsamplingMethod.type) {
case BY_SAMPLE: case BY_SAMPLE:
if(downsamplingMethod.toCoverage == null) if (downsamplingMethod.toCoverage == null)
throw new UserException.BadArgumentValue("dcov", "Downsampling coverage (-dcov) must be specified when downsampling by sample"); throw new UserException.BadArgumentValue("dcov", "Downsampling coverage (-dcov) must be specified when downsampling by sample");
this.targetCoverage = downsamplingMethod.toCoverage; this.targetCoverage = downsamplingMethod.toCoverage;
break; break;
@ -529,10 +555,10 @@ public class LocusIteratorByState extends LocusIterator {
this.targetCoverage = Integer.MAX_VALUE; this.targetCoverage = Integer.MAX_VALUE;
} }
Map<String,ReadSelector> readSelectors = new HashMap<String,ReadSelector>(); Map<String, ReadSelector> readSelectors = new HashMap<String, ReadSelector>();
for(final String sample: samples) { for (final String sample : samples) {
readStatesBySample.put(sample,new PerSampleReadStateManager()); readStatesBySample.put(sample, new PerSampleReadStateManager());
readSelectors.put(sample,downsamplingMethod.type == DownsampleType.BY_SAMPLE ? new NRandomReadSelector(null,targetCoverage) : new AllReadsSelector()); readSelectors.put(sample, downsamplingMethod.type == DownsampleType.BY_SAMPLE ? new NRandomReadSelector(null, targetCoverage) : new AllReadsSelector());
} }
samplePartitioner = new SamplePartitioner(readSelectors); samplePartitioner = new SamplePartitioner(readSelectors);
@ -541,6 +567,7 @@ public class LocusIteratorByState extends LocusIterator {
/** /**
* Returns a iterator over all the reads associated with the given sample. Note that remove() is implemented * Returns a iterator over all the reads associated with the given sample. Note that remove() is implemented
* for this iterator; if present, total read states will be decremented. * for this iterator; if present, total read states will be decremented.
*
* @param sample The sample. * @param sample The sample.
* @return Iterator over the reads associated with that sample. * @return Iterator over the reads associated with that sample.
*/ */
@ -569,6 +596,7 @@ public class LocusIteratorByState extends LocusIterator {
/** /**
* Retrieves the total number of reads in the manager across all samples. * Retrieves the total number of reads in the manager across all samples.
*
* @return Total number of reads over all samples. * @return Total number of reads over all samples.
*/ */
public int size() { public int size() {
@ -577,6 +605,7 @@ public class LocusIteratorByState extends LocusIterator {
/** /**
* Retrieves the total number of reads in the manager in the given sample. * Retrieves the total number of reads in the manager in the given sample.
*
* @param sample The sample. * @param sample The sample.
* @return Total number of reads in the given sample. * @return Total number of reads in the given sample.
*/ */
@ -587,6 +616,7 @@ public class LocusIteratorByState extends LocusIterator {
/** /**
* The extent of downsampling; basically, the furthest base out which has 'fallen * The extent of downsampling; basically, the furthest base out which has 'fallen
* victim' to the downsampler. * victim' to the downsampler.
*
* @param sample Sample, downsampled independently. * @param sample Sample, downsampled independently.
* @return Integer stop of the furthest undownsampled region. * @return Integer stop of the furthest undownsampled region.
*/ */
@ -595,9 +625,9 @@ public class LocusIteratorByState extends LocusIterator {
} }
public SAMRecordState getFirst() { public SAMRecordState getFirst() {
for(final String sample: samples) { for (final String sample : samples) {
PerSampleReadStateManager reads = readStatesBySample.get(sample); PerSampleReadStateManager reads = readStatesBySample.get(sample);
if(!reads.isEmpty()) if (!reads.isEmpty())
return reads.peek(); return reads.peek();
} }
return null; return null;
@ -608,19 +638,18 @@ public class LocusIteratorByState extends LocusIterator {
} }
public void collectPendingReads() { public void collectPendingReads() {
if(!iterator.hasNext()) if (!iterator.hasNext())
return; return;
if(readStates.size() == 0) { if (readStates.size() == 0) {
int firstContigIndex = iterator.peek().getReferenceIndex(); int firstContigIndex = iterator.peek().getReferenceIndex();
int firstAlignmentStart = iterator.peek().getAlignmentStart(); int firstAlignmentStart = iterator.peek().getAlignmentStart();
while(iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) { while (iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) {
samplePartitioner.submitRead(iterator.next()); samplePartitioner.submitRead(iterator.next());
} }
} } else {
else {
// Fast fail in the case that the read is past the current position. // Fast fail in the case that the read is past the current position.
if(readIsPastCurrentPosition(iterator.peek())) if (readIsPastCurrentPosition(iterator.peek()))
return; return;
while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) { while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) {
@ -629,7 +658,7 @@ public class LocusIteratorByState extends LocusIterator {
} }
samplePartitioner.complete(); samplePartitioner.complete();
for(final String sample: samples) { for (final String sample : samples) {
ReadSelector aggregator = samplePartitioner.getSelectedReads(sample); ReadSelector aggregator = samplePartitioner.getSelectedReads(sample);
Collection<SAMRecord> newReads = new ArrayList<SAMRecord>(aggregator.getSelectedReads()); Collection<SAMRecord> newReads = new ArrayList<SAMRecord>(aggregator.getSelectedReads());
@ -638,21 +667,20 @@ public class LocusIteratorByState extends LocusIterator {
int numReads = statesBySample.size(); int numReads = statesBySample.size();
int downsamplingExtent = aggregator.getDownsamplingExtent(); int downsamplingExtent = aggregator.getDownsamplingExtent();
if(numReads+newReads.size()<=targetCoverage || downsamplingMethod.type==DownsampleType.NONE) { if (numReads + newReads.size() <= targetCoverage || downsamplingMethod.type == DownsampleType.NONE) {
long readLimit = aggregator.getNumReadsSeen(); long readLimit = aggregator.getNumReadsSeen();
addReadsToSample(statesBySample,newReads,readLimit); addReadsToSample(statesBySample, newReads, readLimit);
statesBySample.specifyNewDownsamplingExtent(downsamplingExtent); statesBySample.specifyNewDownsamplingExtent(downsamplingExtent);
} } else {
else {
int[] counts = statesBySample.getCountsPerAlignmentStart(); int[] counts = statesBySample.getCountsPerAlignmentStart();
int[] updatedCounts = new int[counts.length]; int[] updatedCounts = new int[counts.length];
System.arraycopy(counts,0,updatedCounts,0,counts.length); System.arraycopy(counts, 0, updatedCounts, 0, counts.length);
boolean readPruned = true; boolean readPruned = true;
while(numReads+newReads.size()>targetCoverage && readPruned) { while (numReads + newReads.size() > targetCoverage && readPruned) {
readPruned = false; readPruned = false;
for(int alignmentStart=updatedCounts.length-1;numReads+newReads.size()>targetCoverage&&alignmentStart>=0;alignmentStart--) { for (int alignmentStart = updatedCounts.length - 1; numReads + newReads.size() > targetCoverage && alignmentStart >= 0; alignmentStart--) {
if(updatedCounts[alignmentStart] > 1) { if (updatedCounts[alignmentStart] > 1) {
updatedCounts[alignmentStart]--; updatedCounts[alignmentStart]--;
numReads--; numReads--;
readPruned = true; readPruned = true;
@ -660,7 +688,7 @@ public class LocusIteratorByState extends LocusIterator {
} }
} }
if(numReads == targetCoverage) { if (numReads == targetCoverage) {
updatedCounts[0]--; updatedCounts[0]--;
numReads--; numReads--;
} }
@ -668,18 +696,18 @@ public class LocusIteratorByState extends LocusIterator {
BitSet toPurge = new BitSet(readStates.size()); BitSet toPurge = new BitSet(readStates.size());
int readOffset = 0; int readOffset = 0;
for(int i = 0; i < updatedCounts.length; i++) { for (int i = 0; i < updatedCounts.length; i++) {
int n = counts[i]; int n = counts[i];
int k = updatedCounts[i]; int k = updatedCounts[i];
for(Integer purgedElement: MathUtils.sampleIndicesWithoutReplacement(n,n-k)) for (Integer purgedElement : MathUtils.sampleIndicesWithoutReplacement(n, n - k))
toPurge.set(readOffset+purgedElement); toPurge.set(readOffset + purgedElement);
readOffset += counts[i]; readOffset += counts[i];
} }
downsamplingExtent = Math.max(downsamplingExtent,statesBySample.purge(toPurge)); downsamplingExtent = Math.max(downsamplingExtent, statesBySample.purge(toPurge));
addReadsToSample(statesBySample,newReads,targetCoverage-numReads); addReadsToSample(statesBySample, newReads, targetCoverage - numReads);
statesBySample.specifyNewDownsamplingExtent(downsamplingExtent); statesBySample.specifyNewDownsamplingExtent(downsamplingExtent);
} }
} }
@ -688,23 +716,25 @@ public class LocusIteratorByState extends LocusIterator {
/** /**
* Add reads with the given sample name to the given hanger entry. * Add reads with the given sample name to the given hanger entry.
*
* @param readStates The list of read states to add this collection of reads. * @param readStates The list of read states to add this collection of reads.
* @param reads Reads to add. Selected reads will be pulled from this source. * @param reads Reads to add. Selected reads will be pulled from this source.
* @param maxReads Maximum number of reads to add. * @param maxReads Maximum number of reads to add.
*/ */
private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<SAMRecord> reads, final long maxReads) { private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<SAMRecord> reads, final long maxReads) {
if(reads.isEmpty()) if (reads.isEmpty())
return; return;
Collection<SAMRecordState> newReadStates = new LinkedList<SAMRecordState>(); Collection<SAMRecordState> newReadStates = new LinkedList<SAMRecordState>();
int readCount = 0; int readCount = 0;
for(SAMRecord read: reads) { for (SAMRecord read : reads) {
if(readCount < maxReads) { if (readCount < maxReads) {
SAMRecordState state = new SAMRecordState(read, readInfo.generateExtendedEvents()); SAMRecordState state = new SAMRecordState(read, readInfo.generateExtendedEvents());
state.stepForwardOnGenome(); state.stepForwardOnGenome();
newReadStates.add(state); newReadStates.add(state);
// TODO: What if we downsample the extended events away? // TODO: What if we downsample the extended events away?
if (state.hadIndel()) hasExtendedEvents = true; if (state.hadIndel())
hasExtendedEvents = true;
readCount++; readCount++;
} }
} }
@ -735,7 +765,7 @@ public class LocusIteratorByState extends LocusIterator {
} }
public void specifyNewDownsamplingExtent(int downsamplingExtent) { public void specifyNewDownsamplingExtent(int downsamplingExtent) {
this.downsamplingExtent = Math.max(this.downsamplingExtent,downsamplingExtent); this.downsamplingExtent = Math.max(this.downsamplingExtent, downsamplingExtent);
} }
public int getDownsamplingExtent() { public int getDownsamplingExtent() {
@ -745,7 +775,7 @@ public class LocusIteratorByState extends LocusIterator {
public int[] getCountsPerAlignmentStart() { public int[] getCountsPerAlignmentStart() {
int[] counts = new int[readStateCounter.size()]; int[] counts = new int[readStateCounter.size()];
int index = 0; int index = 0;
for(Counter counter: readStateCounter) for (Counter counter : readStateCounter)
counts[index++] = counter.getCount(); counts[index++] = counter.getCount();
return counts; return counts;
} }
@ -766,7 +796,7 @@ public class LocusIteratorByState extends LocusIterator {
wrappedIterator.remove(); wrappedIterator.remove();
Counter counter = readStateCounter.peek(); Counter counter = readStateCounter.peek();
counter.decrement(); counter.decrement();
if(counter.getCount() == 0) if (counter.getCount() == 0)
readStateCounter.remove(); readStateCounter.remove();
} }
}; };
@ -775,13 +805,14 @@ public class LocusIteratorByState extends LocusIterator {
/** /**
* Purge the given elements from the bitset. If an element in the bitset is true, purge * Purge the given elements from the bitset. If an element in the bitset is true, purge
* the corresponding read state. * the corresponding read state.
*
* @param elements bits from the set to purge. * @param elements bits from the set to purge.
* @return the extent of the final downsampled read. * @return the extent of the final downsampled read.
*/ */
public int purge(final BitSet elements) { public int purge(final BitSet elements) {
int downsamplingExtent = 0; int downsamplingExtent = 0;
if(elements.isEmpty() || readStates.isEmpty()) return downsamplingExtent; if (elements.isEmpty() || readStates.isEmpty()) return downsamplingExtent;
Iterator<SAMRecordState> readStateIterator = readStates.iterator(); Iterator<SAMRecordState> readStateIterator = readStates.iterator();
@ -794,22 +825,22 @@ public class LocusIteratorByState extends LocusIterator {
int toPurge = elements.nextSetBit(0); int toPurge = elements.nextSetBit(0);
int removedCount = 0; int removedCount = 0;
while(readStateIterator.hasNext() && toPurge >= 0) { while (readStateIterator.hasNext() && toPurge >= 0) {
SAMRecordState state = readStateIterator.next(); SAMRecordState state = readStateIterator.next();
downsamplingExtent = Math.max(downsamplingExtent,state.getRead().getAlignmentEnd()); downsamplingExtent = Math.max(downsamplingExtent, state.getRead().getAlignmentEnd());
if(readIndex == toPurge) { if (readIndex == toPurge) {
readStateIterator.remove(); readStateIterator.remove();
currentCounter.decrement(); currentCounter.decrement();
if(currentCounter.getCount() == 0) if (currentCounter.getCount() == 0)
counterIterator.remove(); counterIterator.remove();
removedCount++; removedCount++;
toPurge = elements.nextSetBit(toPurge+1); toPurge = elements.nextSetBit(toPurge + 1);
} }
readIndex++; readIndex++;
alignmentStartCounter--; alignmentStartCounter--;
if(alignmentStartCounter == 0 && counterIterator.hasNext()) { if (alignmentStartCounter == 0 && counterIterator.hasNext()) {
currentCounter = counterIterator.next(); currentCounter = counterIterator.next();
alignmentStartCounter = currentCounter.getCount(); alignmentStartCounter = currentCounter.getCount();
} }
@ -849,12 +880,14 @@ public class LocusIteratorByState extends LocusIterator {
interface ReadSelector { interface ReadSelector {
/** /**
* All previous selectors in the chain have allowed this read. Submit it to this selector for consideration. * All previous selectors in the chain have allowed this read. Submit it to this selector for consideration.
*
* @param read the read to evaluate. * @param read the read to evaluate.
*/ */
public void submitRead(SAMRecord read); public void submitRead(SAMRecord read);
/** /**
* A previous selector has deemed this read unfit. Notify this selector so that this selector's counts are valid. * A previous selector has deemed this read unfit. Notify this selector so that this selector's counts are valid.
*
* @param read the read previously rejected. * @param read the read previously rejected.
*/ */
public void notifyReadRejected(SAMRecord read); public void notifyReadRejected(SAMRecord read);
@ -866,12 +899,14 @@ interface ReadSelector {
/** /**
* Retrieve the number of reads seen by this selector so far. * Retrieve the number of reads seen by this selector so far.
*
* @return number of reads seen. * @return number of reads seen.
*/ */
public long getNumReadsSeen(); public long getNumReadsSeen();
/** /**
* Return the number of reads accepted by this selector so far. * Return the number of reads accepted by this selector so far.
*
* @return number of reads selected. * @return number of reads selected.
*/ */
public long getNumReadsSelected(); public long getNumReadsSelected();
@ -880,12 +915,14 @@ interface ReadSelector {
* Gets the locus at which the last of the downsampled reads selected by this selector ends. The value returned will be the * Gets the locus at which the last of the downsampled reads selected by this selector ends. The value returned will be the
* last aligned position from this selection to which a downsampled read aligns -- in other words, if a read is thrown out at * last aligned position from this selection to which a downsampled read aligns -- in other words, if a read is thrown out at
* position 3 whose cigar string is 76M, the value of this parameter will be 78. * position 3 whose cigar string is 76M, the value of this parameter will be 78.
*
* @return If any read has been downsampled, this will return the last aligned base of the longest alignment. Else, 0. * @return If any read has been downsampled, this will return the last aligned base of the longest alignment. Else, 0.
*/ */
public int getDownsamplingExtent(); public int getDownsamplingExtent();
/** /**
* Get the reads selected by this selector. * Get the reads selected by this selector.
*
* @return collection of reads selected by this selector. * @return collection of reads selected by this selector.
*/ */
public Collection<SAMRecord> getSelectedReads(); public Collection<SAMRecord> getSelectedReads();
@ -911,7 +948,7 @@ class AllReadsSelector implements ReadSelector {
public void notifyReadRejected(SAMRecord read) { public void notifyReadRejected(SAMRecord read) {
readsSeen++; readsSeen++;
downsamplingExtent = Math.max(downsamplingExtent,read.getAlignmentEnd()); downsamplingExtent = Math.max(downsamplingExtent, read.getAlignmentEnd());
} }
public void complete() { public void complete() {
@ -949,18 +986,18 @@ class NRandomReadSelector implements ReadSelector {
private final ReservoirDownsampler<SAMRecord> reservoir; private final ReservoirDownsampler<SAMRecord> reservoir;
private final ReadSelector chainedSelector; private final ReadSelector chainedSelector;
private long readsSeen = 0; private long readsSeen = 0;
private int downsamplingExtent = 0; private int downsamplingExtent = 0;
public NRandomReadSelector(ReadSelector chainedSelector, long readLimit) { public NRandomReadSelector(ReadSelector chainedSelector, long readLimit) {
this.reservoir = new ReservoirDownsampler<SAMRecord>((int)readLimit); this.reservoir = new ReservoirDownsampler<SAMRecord>((int) readLimit);
this.chainedSelector = chainedSelector; this.chainedSelector = chainedSelector;
} }
public void submitRead(SAMRecord read) { public void submitRead(SAMRecord read) {
SAMRecord displaced = reservoir.add(read); SAMRecord displaced = reservoir.add(read);
if(displaced != null && chainedSelector != null) { if (displaced != null && chainedSelector != null) {
chainedSelector.notifyReadRejected(read); chainedSelector.notifyReadRejected(read);
downsamplingExtent = Math.max(downsamplingExtent,read.getAlignmentEnd()); downsamplingExtent = Math.max(downsamplingExtent, read.getAlignmentEnd());
} }
readsSeen++; readsSeen++;
} }
@ -970,9 +1007,9 @@ class NRandomReadSelector implements ReadSelector {
} }
public void complete() { public void complete() {
for(SAMRecord read: reservoir.getDownsampledContents()) for (SAMRecord read : reservoir.getDownsampledContents())
chainedSelector.submitRead(read); chainedSelector.submitRead(read);
if(chainedSelector != null) if (chainedSelector != null)
chainedSelector.complete(); chainedSelector.complete();
} }
@ -987,7 +1024,7 @@ class NRandomReadSelector implements ReadSelector {
public int getDownsamplingExtent() { public int getDownsamplingExtent() {
return downsamplingExtent; return downsamplingExtent;
} }
public Collection<SAMRecord> getSelectedReads() { public Collection<SAMRecord> getSelectedReads() {
return reservoir.getDownsampledContents(); return reservoir.getDownsampledContents();
@ -996,7 +1033,7 @@ class NRandomReadSelector implements ReadSelector {
public void reset() { public void reset() {
reservoir.clear(); reservoir.clear();
downsamplingExtent = 0; downsamplingExtent = 0;
if(chainedSelector != null) if (chainedSelector != null)
chainedSelector.reset(); chainedSelector.reset();
} }
} }
@ -1005,23 +1042,23 @@ class NRandomReadSelector implements ReadSelector {
* Note: stores reads by sample ID string, not by sample object * Note: stores reads by sample ID string, not by sample object
*/ */
class SamplePartitioner implements ReadSelector { class SamplePartitioner implements ReadSelector {
private final Map<String,ReadSelector> readsBySample; private final Map<String, ReadSelector> readsBySample;
private long readsSeen = 0; private long readsSeen = 0;
public SamplePartitioner(Map<String,ReadSelector> readSelectors) { public SamplePartitioner(Map<String, ReadSelector> readSelectors) {
readsBySample = readSelectors; readsBySample = readSelectors;
} }
public void submitRead(SAMRecord read) { public void submitRead(SAMRecord read) {
String sampleName = read.getReadGroup()!=null ? read.getReadGroup().getSample() : null; String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
if(readsBySample.containsKey(sampleName)) if (readsBySample.containsKey(sampleName))
readsBySample.get(sampleName).submitRead(read); readsBySample.get(sampleName).submitRead(read);
readsSeen++; readsSeen++;
} }
public void notifyReadRejected(SAMRecord read) { public void notifyReadRejected(SAMRecord read) {
String sampleName = read.getReadGroup()!=null ? read.getReadGroup().getSample() : null; String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
if(readsBySample.containsKey(sampleName)) if (readsBySample.containsKey(sampleName))
readsBySample.get(sampleName).notifyReadRejected(read); readsBySample.get(sampleName).notifyReadRejected(read);
readsSeen++; readsSeen++;
} }
@ -1040,23 +1077,23 @@ class SamplePartitioner implements ReadSelector {
public int getDownsamplingExtent() { public int getDownsamplingExtent() {
int downsamplingExtent = 0; int downsamplingExtent = 0;
for(ReadSelector storage: readsBySample.values()) for (ReadSelector storage : readsBySample.values())
downsamplingExtent = Math.max(downsamplingExtent,storage.getDownsamplingExtent()); downsamplingExtent = Math.max(downsamplingExtent, storage.getDownsamplingExtent());
return downsamplingExtent; return downsamplingExtent;
} }
public Collection<SAMRecord> getSelectedReads() { public Collection<SAMRecord> getSelectedReads() {
throw new UnsupportedOperationException("Cannot directly get selected reads from a read partitioner."); throw new UnsupportedOperationException("Cannot directly get selected reads from a read partitioner.");
} }
public ReadSelector getSelectedReads(String sampleName) { public ReadSelector getSelectedReads(String sampleName) {
if(!readsBySample.containsKey(sampleName)) if (!readsBySample.containsKey(sampleName))
throw new NoSuchElementException("Sample name not found"); throw new NoSuchElementException("Sample name not found");
return readsBySample.get(sampleName); return readsBySample.get(sampleName);
} }
public void reset() { public void reset() {
for(ReadSelector storage: readsBySample.values()) for (ReadSelector storage : readsBySample.values())
storage.reset(); storage.reset();
readsSeen = 0; readsSeen = 0;
} }

View File

@ -25,13 +25,13 @@ public class BaseQualityRankSumTest extends RankSumTest {
protected void fillQualsFromPileup(byte ref, byte alt, ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) { protected void fillQualsFromPileup(byte ref, byte alt, ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) {
for ( final PileupElement p : pileup ) { for ( final PileupElement p : pileup ) {
if( isUsableBase(p) ) { if( isUsableBase(p) ) {
if ( p.getBase() == ref ) { if ( p.getBase() == ref )
refQuals.add((double)p.getQual()); refQuals.add((double)p.getQual());
} else if ( p.getBase() == alt ) { else if ( p.getBase() == alt )
altQuals.add((double)p.getQual()); altQuals.add((double)p.getQual());
}
} }
} }
} }
protected void fillIndelQualsFromPileup(ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) { protected void fillIndelQualsFromPileup(ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) {
// equivalent is whether indel likelihoods for reads corresponding to ref allele are more likely than reads corresponding to alt allele ? // equivalent is whether indel likelihoods for reads corresponding to ref allele are more likely than reads corresponding to alt allele ?
@ -57,8 +57,6 @@ public class BaseQualityRankSumTest extends RankSumTest {
refQuals.add(-10.0*refLikelihood); refQuals.add(-10.0*refLikelihood);
else if (altLikelihood > refLikelihood + INDEL_LIKELIHOOD_THRESH) else if (altLikelihood > refLikelihood + INDEL_LIKELIHOOD_THRESH)
altQuals.add(-10.0*altLikelihood); altQuals.add(-10.0*altLikelihood);
} }
} }
} }

View File

@ -205,7 +205,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
for ( Map.Entry<String, AlignmentContext> sample : stratifiedContexts.entrySet() ) { for ( Map.Entry<String, AlignmentContext> sample : stratifiedContexts.entrySet() ) {
for (PileupElement p : sample.getValue().getBasePileup()) { for (PileupElement p : sample.getValue().getBasePileup()) {
if ( p.isDeletion() || p.isReducedRead() ) // ignore deletions and reduced reads if ( p.isDeletion() || p.getRead().isReducedRead() ) // ignore deletions and reduced reads
continue; continue;
if ( p.getRead().getMappingQuality() < 20 || p.getQual() < 20 ) if ( p.getRead().getMappingQuality() < 20 || p.getQual() < 20 )
@ -258,7 +258,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
continue; continue;
for (final PileupElement p: pileup) { for (final PileupElement p: pileup) {
if ( p.isReducedRead() ) // ignore reduced reads if ( p.getRead().isReducedRead() ) // ignore reduced reads
continue; continue;
if ( p.getRead().getMappingQuality() < 20) if ( p.getRead().getMappingQuality() < 20)
continue; continue;

View File

@ -24,7 +24,6 @@
package org.broadinstitute.sting.gatk.walkers.annotator; package org.broadinstitute.sting.gatk.walkers.annotator;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext; import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils; import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
@ -43,6 +42,7 @@ import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.pileup.PileupElement; import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup; import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.sam.AlignmentUtils; import org.broadinstitute.sting.utils.sam.AlignmentUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.variantcontext.Allele; import org.broadinstitute.sting.utils.variantcontext.Allele;
import org.broadinstitute.sting.utils.variantcontext.Genotype; import org.broadinstitute.sting.utils.variantcontext.Genotype;
import org.broadinstitute.sting.utils.variantcontext.VariantContext; import org.broadinstitute.sting.utils.variantcontext.VariantContext;
@ -62,15 +62,15 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
private final static char REGEXP_WILDCARD = '.'; private final static char REGEXP_WILDCARD = '.';
public Map<String, Object> annotate(RefMetaDataTracker tracker, AnnotatorCompatibleWalker walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc) { public Map<String, Object> annotate(RefMetaDataTracker tracker, AnnotatorCompatibleWalker walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc) {
if (stratifiedContexts.size() == 0 ) // size 0 means that call was made by someone else and we have no data here if (stratifiedContexts.size() == 0) // size 0 means that call was made by someone else and we have no data here
return null; return null;
if (vc.isSNP() && !vc.isBiallelic()) if (vc.isSNP() && !vc.isBiallelic())
return null; return null;
final AlignmentContext context = AlignmentContextUtils.joinContexts(stratifiedContexts.values()); final AlignmentContext context = AlignmentContextUtils.joinContexts(stratifiedContexts.values());
final int contextWingSize = Math.min(((int)ref.getWindow().size() - 1)/2, MIN_CONTEXT_WING_SIZE); final int contextWingSize = Math.min(((int) ref.getWindow().size() - 1) / 2, MIN_CONTEXT_WING_SIZE);
final int contextSize = contextWingSize * 2 + 1; final int contextSize = contextWingSize * 2 + 1;
final int locus = ref.getLocus().getStart() + (ref.getLocus().getStop() - ref.getLocus().getStart()) / 2; final int locus = ref.getLocus().getStart() + (ref.getLocus().getStop() - ref.getLocus().getStart()) / 2;
@ -84,14 +84,14 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
if (pileup == null) if (pileup == null)
return null; return null;
final List<Haplotype> haplotypes = computeHaplotypes(pileup, contextSize, locus, vc); final List<Haplotype> haplotypes = computeHaplotypes(pileup, contextSize, locus, vc);
final MathUtils.RunningAverage scoreRA = new MathUtils.RunningAverage(); final MathUtils.RunningAverage scoreRA = new MathUtils.RunningAverage();
if (haplotypes != null) { if (haplotypes != null) {
for ( final Genotype genotype : vc.getGenotypes()) { for (final Genotype genotype : vc.getGenotypes()) {
final AlignmentContext thisContext = stratifiedContexts.get(genotype.getSampleName()); final AlignmentContext thisContext = stratifiedContexts.get(genotype.getSampleName());
if ( thisContext != null ) { if (thisContext != null) {
final ReadBackedPileup thisPileup; final ReadBackedPileup thisPileup;
if (thisContext.hasExtendedEventPileup()) if (thisContext.hasExtendedEventPileup())
thisPileup = thisContext.getExtendedEventPileup(); thisPileup = thisContext.getExtendedEventPileup();
@ -102,14 +102,13 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
if (thisPileup != null) { if (thisPileup != null) {
if (vc.isSNP()) if (vc.isSNP())
scoreRA.add( scoreReadsAgainstHaplotypes(haplotypes, thisPileup, contextSize, locus) ); // Taking the simple average of all sample's score since the score can be negative and the RMS doesn't make sense scoreRA.add(scoreReadsAgainstHaplotypes(haplotypes, thisPileup, contextSize, locus)); // Taking the simple average of all sample's score since the score can be negative and the RMS doesn't make sense
else if (vc.isIndel() || vc.isMixed()) { else if (vc.isIndel() || vc.isMixed()) {
Double d = scoreIndelsAgainstHaplotypes(thisPileup); Double d = scoreIndelsAgainstHaplotypes(thisPileup);
if (d == null) if (d == null)
return null; return null;
scoreRA.add( d ); // Taking the simple average of all sample's score since the score can be negative and the RMS doesn't make sense scoreRA.add(d); // Taking the simple average of all sample's score since the score can be negative and the RMS doesn't make sense
} } else
else
return null; return null;
} }
} }
@ -122,12 +121,12 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
return map; return map;
} }
private class HaplotypeComparator implements Comparator<Haplotype>{ private class HaplotypeComparator implements Comparator<Haplotype> {
public int compare(Haplotype a, Haplotype b) { public int compare(Haplotype a, Haplotype b) {
if (a.getQualitySum() < b.getQualitySum()) if (a.getQualitySum() < b.getQualitySum())
return 1; return 1;
if (a.getQualitySum() > b.getQualitySum()){ if (a.getQualitySum() > b.getQualitySum()) {
return -1; return -1;
} }
return 0; return 0;
@ -137,39 +136,38 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
private List<Haplotype> computeHaplotypes(final ReadBackedPileup pileup, final int contextSize, final int locus, final VariantContext vc) { private List<Haplotype> computeHaplotypes(final ReadBackedPileup pileup, final int contextSize, final int locus, final VariantContext vc) {
// Compute all possible haplotypes consistent with current pileup // Compute all possible haplotypes consistent with current pileup
int haplotypesToCompute = vc.getAlternateAlleles().size()+1; int haplotypesToCompute = vc.getAlternateAlleles().size() + 1;
final PriorityQueue<Haplotype> candidateHaplotypeQueue = new PriorityQueue<Haplotype>(100, new HaplotypeComparator()); final PriorityQueue<Haplotype> candidateHaplotypeQueue = new PriorityQueue<Haplotype>(100, new HaplotypeComparator());
final PriorityQueue<Haplotype> consensusHaplotypeQueue = new PriorityQueue<Haplotype>(MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER, new HaplotypeComparator()); final PriorityQueue<Haplotype> consensusHaplotypeQueue = new PriorityQueue<Haplotype>(MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER, new HaplotypeComparator());
for ( final PileupElement p : pileup ) { for (final PileupElement p : pileup) {
final Haplotype haplotypeFromRead = getHaplotypeFromRead(p, contextSize, locus); final Haplotype haplotypeFromRead = getHaplotypeFromRead(p, contextSize, locus);
candidateHaplotypeQueue.add(haplotypeFromRead); candidateHaplotypeQueue.add(haplotypeFromRead);
} }
// Now that priority queue has been built with all reads at context, we need to merge and find possible segregating haplotypes // Now that priority queue has been built with all reads at context, we need to merge and find possible segregating haplotypes
Haplotype elem; Haplotype elem;
while ((elem = candidateHaplotypeQueue.poll()) != null) { while ((elem = candidateHaplotypeQueue.poll()) != null) {
boolean foundHaplotypeMatch = false; boolean foundHaplotypeMatch = false;
Haplotype lastCheckedHaplotype = null; Haplotype lastCheckedHaplotype = null;
for ( final Haplotype haplotypeFromList : consensusHaplotypeQueue ) { for (final Haplotype haplotypeFromList : consensusHaplotypeQueue) {
final Haplotype consensusHaplotype = getConsensusHaplotype(elem, haplotypeFromList); final Haplotype consensusHaplotype = getConsensusHaplotype(elem, haplotypeFromList);
if (consensusHaplotype != null) { if (consensusHaplotype != null) {
foundHaplotypeMatch = true; foundHaplotypeMatch = true;
if (consensusHaplotype.getQualitySum() > haplotypeFromList.getQualitySum()) { if (consensusHaplotype.getQualitySum() > haplotypeFromList.getQualitySum()) {
consensusHaplotypeQueue.remove(haplotypeFromList); consensusHaplotypeQueue.remove(haplotypeFromList);
consensusHaplotypeQueue.add(consensusHaplotype); consensusHaplotypeQueue.add(consensusHaplotype);
} }
break; break;
} } else {
else {
lastCheckedHaplotype = haplotypeFromList; lastCheckedHaplotype = haplotypeFromList;
} }
} }
if (!foundHaplotypeMatch && consensusHaplotypeQueue.size() < MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER) { if (!foundHaplotypeMatch && consensusHaplotypeQueue.size() < MAX_CONSENSUS_HAPLOTYPES_TO_CONSIDER) {
consensusHaplotypeQueue.add(elem); consensusHaplotypeQueue.add(elem);
} else if (!foundHaplotypeMatch && lastCheckedHaplotype != null && elem.getQualitySum() > lastCheckedHaplotype.getQualitySum() ) { } else if (!foundHaplotypeMatch && lastCheckedHaplotype != null && elem.getQualitySum() > lastCheckedHaplotype.getQualitySum()) {
consensusHaplotypeQueue.remove(lastCheckedHaplotype); consensusHaplotypeQueue.remove(lastCheckedHaplotype);
consensusHaplotypeQueue.add(elem); consensusHaplotypeQueue.add(elem);
} }
@ -180,12 +178,14 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
// The consensus haplotypes are in a quality-ordered priority queue, so the best haplotypes are just the ones at the front of the queue // The consensus haplotypes are in a quality-ordered priority queue, so the best haplotypes are just the ones at the front of the queue
final Haplotype haplotype1 = consensusHaplotypeQueue.poll(); final Haplotype haplotype1 = consensusHaplotypeQueue.poll();
List<Haplotype>hlist = new ArrayList<Haplotype>(); List<Haplotype> hlist = new ArrayList<Haplotype>();
hlist.add(new Haplotype(haplotype1.getBases(), 60)); hlist.add(new Haplotype(haplotype1.getBases(), 60));
for (int k=1; k < haplotypesToCompute; k++) { for (int k = 1; k < haplotypesToCompute; k++) {
Haplotype haplotype2 = consensusHaplotypeQueue.poll(); Haplotype haplotype2 = consensusHaplotypeQueue.poll();
if(haplotype2 == null ) { haplotype2 = haplotype1; } // Sometimes only the reference haplotype can be found if (haplotype2 == null) {
haplotype2 = haplotype1;
} // Sometimes only the reference haplotype can be found
hlist.add(new Haplotype(haplotype2.getBases(), 20)); hlist.add(new Haplotype(haplotype2.getBases(), 20));
} }
return hlist; return hlist;
@ -194,36 +194,43 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
} }
private Haplotype getHaplotypeFromRead(final PileupElement p, final int contextSize, final int locus) { private Haplotype getHaplotypeFromRead(final PileupElement p, final int contextSize, final int locus) {
final SAMRecord read = p.getRead(); final GATKSAMRecord read = p.getRead();
int readOffsetFromPileup = p.getOffset(); int readOffsetFromPileup = p.getOffset();
final byte[] haplotypeBases = new byte[contextSize]; final byte[] haplotypeBases = new byte[contextSize];
Arrays.fill(haplotypeBases, (byte)REGEXP_WILDCARD); Arrays.fill(haplotypeBases, (byte) REGEXP_WILDCARD);
final double[] baseQualities = new double[contextSize]; final double[] baseQualities = new double[contextSize];
Arrays.fill(baseQualities, 0.0); Arrays.fill(baseQualities, 0.0);
byte[] readBases = read.getReadBases(); byte[] readBases = read.getReadBases();
readBases = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readBases); // Adjust the read bases based on the Cigar string readBases = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readBases); // Adjust the read bases based on the Cigar string
byte[] readQuals = read.getBaseQualities(); byte[] readQuals = read.getBaseQualities();
readQuals = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string readQuals = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string
readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), readOffsetFromPileup, p.getRead().getAlignmentStart(), locus); readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(read.getCigar(), p, read.getAlignmentStart(), locus);
final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1)/2; final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1) / 2;
for (int i = 0; i < contextSize; i++ ) { for (int i = 0; i < contextSize; i++) {
final int baseOffset = i + baseOffsetStart; final int baseOffset = i + baseOffsetStart;
if ( baseOffset < 0 ) { if (baseOffset < 0) {
continue; continue;
} }
if ( baseOffset >= readBases.length ) { if (baseOffset >= readBases.length) {
break; break;
} }
if( readQuals[baseOffset] == PileupElement.DELETION_BASE) { readQuals[baseOffset] = PileupElement.DELETION_QUAL; } if (readQuals[baseOffset] == PileupElement.DELETION_BASE) {
if( !BaseUtils.isRegularBase(readBases[baseOffset]) ) { readBases[baseOffset] = (byte)REGEXP_WILDCARD; readQuals[baseOffset] = (byte) 0; } // N's shouldn't be treated as distinct bases readQuals[baseOffset] = PileupElement.DELETION_QUAL;
readQuals[baseOffset] = (byte)Math.min((int)readQuals[baseOffset], p.getMappingQual()); }
if( ((int)readQuals[baseOffset]) < 5 ) { readQuals[baseOffset] = (byte) 0; } // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them if (!BaseUtils.isRegularBase(readBases[baseOffset])) {
readBases[baseOffset] = (byte) REGEXP_WILDCARD;
readQuals[baseOffset] = (byte) 0;
} // N's shouldn't be treated as distinct bases
readQuals[baseOffset] = (byte) Math.min((int) readQuals[baseOffset], p.getMappingQual());
if (((int) readQuals[baseOffset]) < 5) {
readQuals[baseOffset] = (byte) 0;
} // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them
haplotypeBases[i] = readBases[baseOffset]; haplotypeBases[i] = readBases[baseOffset];
baseQualities[i] = (double)readQuals[baseOffset]; baseQualities[i] = (double) readQuals[baseOffset];
} }
return new Haplotype(haplotypeBases, baseQualities); return new Haplotype(haplotypeBases, baseQualities);
@ -238,7 +245,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
} }
byte chA, chB; byte chA, chB;
final byte wc = (byte)REGEXP_WILDCARD; final byte wc = (byte) REGEXP_WILDCARD;
final int length = a.length; final int length = a.length;
final byte[] consensusChars = new byte[length]; final byte[] consensusChars = new byte[length];
@ -247,7 +254,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
final double[] qualsA = haplotypeA.getQuals(); final double[] qualsA = haplotypeA.getQuals();
final double[] qualsB = haplotypeB.getQuals(); final double[] qualsB = haplotypeB.getQuals();
for (int i=0; i < length; i++) { for (int i = 0; i < length; i++) {
chA = a[i]; chA = a[i];
chB = b[i]; chB = b[i];
@ -257,17 +264,15 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
if ((chA == wc) && (chB == wc)) { if ((chA == wc) && (chB == wc)) {
consensusChars[i] = wc; consensusChars[i] = wc;
consensusQuals[i] = 0.0; consensusQuals[i] = 0.0;
} } else if ((chA == wc)) {
else if ((chA == wc)) {
consensusChars[i] = chB; consensusChars[i] = chB;
consensusQuals[i] = qualsB[i]; consensusQuals[i] = qualsB[i];
} } else if ((chB == wc)) {
else if ((chB == wc)){
consensusChars[i] = chA; consensusChars[i] = chA;
consensusQuals[i] = qualsA[i]; consensusQuals[i] = qualsA[i];
} else { } else {
consensusChars[i] = chA; consensusChars[i] = chA;
consensusQuals[i] = qualsA[i]+qualsB[i]; consensusQuals[i] = qualsA[i] + qualsB[i];
} }
} }
@ -276,31 +281,33 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
// calculate the haplotype scores by walking over all reads and comparing them to the haplotypes // calculate the haplotype scores by walking over all reads and comparing them to the haplotypes
private double scoreReadsAgainstHaplotypes(final List<Haplotype> haplotypes, final ReadBackedPileup pileup, final int contextSize, final int locus) { private double scoreReadsAgainstHaplotypes(final List<Haplotype> haplotypes, final ReadBackedPileup pileup, final int contextSize, final int locus) {
if ( DEBUG ) System.out.printf("HAP1: %s%n", haplotypes.get(0)); if (DEBUG) System.out.printf("HAP1: %s%n", haplotypes.get(0));
if ( DEBUG ) System.out.printf("HAP2: %s%n", haplotypes.get(1)); if (DEBUG) System.out.printf("HAP2: %s%n", haplotypes.get(1));
final ArrayList<double[]> haplotypeScores = new ArrayList<double[]>(); final ArrayList<double[]> haplotypeScores = new ArrayList<double[]>();
for ( final PileupElement p : pileup ) { for (final PileupElement p : pileup) {
// Score all the reads in the pileup, even the filtered ones // Score all the reads in the pileup, even the filtered ones
final double[] scores = new double[haplotypes.size()]; final double[] scores = new double[haplotypes.size()];
for ( int i = 0; i < haplotypes.size(); i++ ) { for (int i = 0; i < haplotypes.size(); i++) {
final Haplotype haplotype = haplotypes.get(i); final Haplotype haplotype = haplotypes.get(i);
final double score = scoreReadAgainstHaplotype(p, contextSize, haplotype, locus); final double score = scoreReadAgainstHaplotype(p, contextSize, haplotype, locus);
scores[i] = score; scores[i] = score;
if ( DEBUG ) { System.out.printf(" vs. haplotype %d = %f%n", i, score); } if (DEBUG) {
System.out.printf(" vs. haplotype %d = %f%n", i, score);
}
} }
haplotypeScores.add(scores); haplotypeScores.add(scores);
} }
double overallScore = 0.0; double overallScore = 0.0;
for ( final double[] readHaplotypeScores : haplotypeScores ) { for (final double[] readHaplotypeScores : haplotypeScores) {
overallScore += MathUtils.arrayMin(readHaplotypeScores); overallScore += MathUtils.arrayMin(readHaplotypeScores);
} }
return overallScore; return overallScore;
} }
private double scoreReadAgainstHaplotype(final PileupElement p, final int contextSize, final Haplotype haplotype, final int locus ) { private double scoreReadAgainstHaplotype(final PileupElement p, final int contextSize, final Haplotype haplotype, final int locus) {
double expected = 0.0; double expected = 0.0;
double mismatches = 0.0; double mismatches = 0.0;
@ -315,33 +322,35 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
// the chance that it is actually a mismatch is 1 - e, since any of the other 3 options would be a mismatch. // the chance that it is actually a mismatch is 1 - e, since any of the other 3 options would be a mismatch.
// so the probability-weighted mismatch rate is sum_i ( matched ? e_i / 3 : 1 - e_i ) for i = 1 ... n // so the probability-weighted mismatch rate is sum_i ( matched ? e_i / 3 : 1 - e_i ) for i = 1 ... n
final byte[] haplotypeBases = haplotype.getBases(); final byte[] haplotypeBases = haplotype.getBases();
final SAMRecord read = p.getRead(); final GATKSAMRecord read = p.getRead();
byte[] readBases = read.getReadBases(); byte[] readBases = read.getReadBases();
readBases = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readBases); // Adjust the read bases based on the Cigar string readBases = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readBases); // Adjust the read bases based on the Cigar string
byte[] readQuals = read.getBaseQualities(); byte[] readQuals = read.getBaseQualities();
readQuals = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string readQuals = AlignmentUtils.readToAlignmentByteArray(p.getRead().getCigar(), readQuals); // Shift the location of the qual scores based on the Cigar string
int readOffsetFromPileup = p.getOffset(); int readOffsetFromPileup = p.getOffset();
readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), readOffsetFromPileup, p.getRead().getAlignmentStart(), locus); readOffsetFromPileup = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, read.getAlignmentStart(), locus);
final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1)/2; final int baseOffsetStart = readOffsetFromPileup - (contextSize - 1) / 2;
for ( int i = 0; i < contextSize; i++ ) { for (int i = 0; i < contextSize; i++) {
final int baseOffset = i + baseOffsetStart; final int baseOffset = i + baseOffsetStart;
if ( baseOffset < 0 ) { if (baseOffset < 0) {
continue; continue;
} }
if ( baseOffset >= readBases.length ) { if (baseOffset >= readBases.length) {
break; break;
} }
final byte haplotypeBase = haplotypeBases[i]; final byte haplotypeBase = haplotypeBases[i];
final byte readBase = readBases[baseOffset]; final byte readBase = readBases[baseOffset];
final boolean matched = ( readBase == haplotypeBase || haplotypeBase == (byte)REGEXP_WILDCARD ); final boolean matched = (readBase == haplotypeBase || haplotypeBase == (byte) REGEXP_WILDCARD);
byte qual = readQuals[baseOffset]; byte qual = readQuals[baseOffset];
if( qual == PileupElement.DELETION_BASE ) { qual = PileupElement.DELETION_QUAL; } // calcAlignmentByteArrayOffset fills the readQuals array with DELETION_BASE at deletions if (qual == PileupElement.DELETION_BASE) {
qual = (byte)Math.min((int)qual, p.getMappingQual()); qual = PileupElement.DELETION_QUAL;
if( ((int) qual) >= 5 ) { // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them } // calcAlignmentByteArrayOffset fills the readQuals array with DELETION_BASE at deletions
qual = (byte) Math.min((int) qual, p.getMappingQual());
if (((int) qual) >= 5) { // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them
final double e = QualityUtils.qualToErrorProb(qual); final double e = QualityUtils.qualToErrorProb(qual);
expected += e; expected += e;
mismatches += matched ? e : 1.0 - e / 3.0; mismatches += matched ? e : 1.0 - e / 3.0;
@ -355,26 +364,27 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
} }
private Double scoreIndelsAgainstHaplotypes(final ReadBackedPileup pileup) { private Double scoreIndelsAgainstHaplotypes(final ReadBackedPileup pileup) {
final ArrayList<double[]> haplotypeScores = new ArrayList<double[]>(); final ArrayList<double[]> haplotypeScores = new ArrayList<double[]>();
final HashMap<PileupElement,LinkedHashMap<Allele,Double>> indelLikelihoodMap = IndelGenotypeLikelihoodsCalculationModel.getIndelLikelihoodMap(); final HashMap<PileupElement, LinkedHashMap<Allele, Double>> indelLikelihoodMap = IndelGenotypeLikelihoodsCalculationModel.getIndelLikelihoodMap();
if (indelLikelihoodMap== null) if (indelLikelihoodMap == null)
return null; return null;
for (final PileupElement p: pileup) { for (final PileupElement p : pileup) {
if (indelLikelihoodMap.containsKey(p)) { if (indelLikelihoodMap.containsKey(p)) {
// retrieve likelihood information corresponding to this read // retrieve likelihood information corresponding to this read
LinkedHashMap<Allele,Double> el = indelLikelihoodMap.get(p); LinkedHashMap<Allele, Double> el = indelLikelihoodMap.get(p);
// Score all the reads in the pileup, even the filtered ones // Score all the reads in the pileup, even the filtered ones
final double[] scores = new double[el.size()]; final double[] scores = new double[el.size()];
int i = 0; int i = 0;
for (Allele a: el.keySet() ) { for (Allele a : el.keySet()) {
scores[i++] = -el.get(a); scores[i++] = -el.get(a);
if ( DEBUG ) { System.out.printf(" vs. haplotype %d = %f%n", i-1, scores[i-1]); } if (DEBUG) {
System.out.printf(" vs. haplotype %d = %f%n", i - 1, scores[i - 1]);
}
} }
haplotypeScores.add(scores); haplotypeScores.add(scores);
@ -383,7 +393,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
// indel likelihoods are stric log-probs, not phred scored // indel likelihoods are stric log-probs, not phred scored
double overallScore = 0.0; double overallScore = 0.0;
for ( final double[] readHaplotypeScores : haplotypeScores ) { for (final double[] readHaplotypeScores : haplotypeScores) {
overallScore += MathUtils.arrayMin(readHaplotypeScores); overallScore += MathUtils.arrayMin(readHaplotypeScores);
} }
@ -392,6 +402,11 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
} }
public List<String> getKeyNames() { return Arrays.asList("HaplotypeScore"); } public List<String> getKeyNames() {
public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("HaplotypeScore", 1, VCFHeaderLineType.Float, "Consistency of the site with at most two segregating haplotypes")); } return Arrays.asList("HaplotypeScore");
}
public List<VCFInfoHeaderLine> getDescriptions() {
return Arrays.asList(new VCFInfoHeaderLine("HaplotypeScore", 1, VCFHeaderLineType.Float, "Consistency of the site with at most two segregating haplotypes"));
}
} }

View File

@ -30,11 +30,11 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements Standar
static final boolean DEBUG = false; static final boolean DEBUG = false;
public Map<String, Object> annotate(RefMetaDataTracker tracker, AnnotatorCompatibleWalker walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc) { public Map<String, Object> annotate(RefMetaDataTracker tracker, AnnotatorCompatibleWalker walker, ReferenceContext ref, Map<String, AlignmentContext> stratifiedContexts, VariantContext vc) {
if ( stratifiedContexts.size() == 0 ) if (stratifiedContexts.size() == 0)
return null; return null;
final GenotypesContext genotypes = vc.getGenotypes(); final GenotypesContext genotypes = vc.getGenotypes();
if ( genotypes == null || genotypes.size() == 0 ) if (genotypes == null || genotypes.size() == 0)
return null; return null;
@ -43,19 +43,18 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements Standar
if (vc.isSNP() && vc.isBiallelic()) { if (vc.isSNP() && vc.isBiallelic()) {
// todo - no current support for multiallelic snps // todo - no current support for multiallelic snps
for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) { for (final Genotype genotype : genotypes.iterateInSampleNameOrder()) {
final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName());
if ( context == null ) { if (context == null) {
continue; continue;
} }
fillQualsFromPileup(ref.getBase(), vc.getAlternateAllele(0).getBases()[0], context.getBasePileup(), refQuals, altQuals); fillQualsFromPileup(ref.getBase(), vc.getAlternateAllele(0).getBases()[0], context.getBasePileup(), refQuals, altQuals);
} }
} } else if (vc.isIndel() || vc.isMixed()) {
else if (vc.isIndel() || vc.isMixed()) {
for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) { for (final Genotype genotype : genotypes.iterateInSampleNameOrder()) {
final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName()); final AlignmentContext context = stratifiedContexts.get(genotype.getSampleName());
if ( context == null ) { if (context == null) {
continue; continue;
} }
@ -74,46 +73,47 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements Standar
fillIndelQualsFromPileup(pileup, refQuals, altQuals); fillIndelQualsFromPileup(pileup, refQuals, altQuals);
} }
} } else
else
return null; return null;
final MannWhitneyU mannWhitneyU = new MannWhitneyU(); final MannWhitneyU mannWhitneyU = new MannWhitneyU();
for ( final Double qual : altQuals ) { for (final Double qual : altQuals) {
mannWhitneyU.add(qual, MannWhitneyU.USet.SET1); mannWhitneyU.add(qual, MannWhitneyU.USet.SET1);
} }
for ( final Double qual : refQuals ) { for (final Double qual : refQuals) {
mannWhitneyU.add(qual, MannWhitneyU.USet.SET2); mannWhitneyU.add(qual, MannWhitneyU.USet.SET2);
} }
if (DEBUG) { if (DEBUG) {
System.out.format("%s, REF QUALS:",this.getClass().getName()); System.out.format("%s, REF QUALS:", this.getClass().getName());
for ( final Double qual : refQuals ) for (final Double qual : refQuals)
System.out.format("%4.1f ",qual); System.out.format("%4.1f ", qual);
System.out.println(); System.out.println();
System.out.format("%s, ALT QUALS:",this.getClass().getName()); System.out.format("%s, ALT QUALS:", this.getClass().getName());
for ( final Double qual : altQuals ) for (final Double qual : altQuals)
System.out.format("%4.1f ",qual); System.out.format("%4.1f ", qual);
System.out.println(); System.out.println();
} }
// we are testing that set1 (the alt bases) have lower quality scores than set2 (the ref bases) // we are testing that set1 (the alt bases) have lower quality scores than set2 (the ref bases)
final Pair<Double,Double> testResults = mannWhitneyU.runOneSidedTest( MannWhitneyU.USet.SET1 ); final Pair<Double, Double> testResults = mannWhitneyU.runOneSidedTest(MannWhitneyU.USet.SET1);
final Map<String, Object> map = new HashMap<String, Object>(); final Map<String, Object> map = new HashMap<String, Object>();
if ( ! Double.isNaN(testResults.first) ) if (!Double.isNaN(testResults.first))
map.put(getKeyNames().get(0), String.format("%.3f", testResults.first)); map.put(getKeyNames().get(0), String.format("%.3f", testResults.first));
return map; return map;
} }
protected abstract void fillQualsFromPileup(byte ref, byte alt, ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals); protected abstract void fillQualsFromPileup(byte ref, byte alt, ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals);
protected abstract void fillIndelQualsFromPileup(ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals); protected abstract void fillIndelQualsFromPileup(ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals);
protected static boolean isUsableBase( final PileupElement p ) { protected static boolean isUsableBase(final PileupElement p) {
return !( p.isDeletion() || return !(p.isInsertionAtBeginningOfRead() ||
p.getMappingQual() == 0 || p.isDeletion() ||
p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE || p.getMappingQual() == 0 ||
((int)p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE ); // need the unBAQed quality score here p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE ||
((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); // need the unBAQed quality score here
} }
} }

View File

@ -24,27 +24,32 @@ import java.util.List;
*/ */
public class ReadPosRankSumTest extends RankSumTest { public class ReadPosRankSumTest extends RankSumTest {
public List<String> getKeyNames() { return Arrays.asList("ReadPosRankSum"); } public List<String> getKeyNames() {
return Arrays.asList("ReadPosRankSum");
}
public List<VCFInfoHeaderLine> getDescriptions() { return Arrays.asList(new VCFInfoHeaderLine("ReadPosRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. Ref read position bias")); } public List<VCFInfoHeaderLine> getDescriptions() {
return Arrays.asList(new VCFInfoHeaderLine("ReadPosRankSum", 1, VCFHeaderLineType.Float, "Z-score from Wilcoxon rank sum test of Alt vs. Ref read position bias"));
}
protected void fillQualsFromPileup(byte ref, byte alt, ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) { protected void fillQualsFromPileup(byte ref, byte alt, ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) {
for ( final PileupElement p : pileup ) { for (final PileupElement p : pileup) {
if( isUsableBase(p) ) { if (isUsableBase(p)) {
int readPos = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p.getOffset(), 0, 0); int readPos = AlignmentUtils.calcAlignmentByteArrayOffset(p.getRead().getCigar(), p, 0, 0);
final int numAlignedBases = AlignmentUtils.getNumAlignedBases(p.getRead()); final int numAlignedBases = AlignmentUtils.getNumAlignedBases(p.getRead());
if( readPos > numAlignedBases / 2 ) { if (readPos > numAlignedBases / 2)
readPos = numAlignedBases - ( readPos + 1 ); readPos = numAlignedBases - (readPos + 1);
}
if (p.getBase() == ref)
refQuals.add((double) readPos);
else if (p.getBase() == alt)
altQuals.add((double) readPos);
if ( p.getBase() == ref ) {
refQuals.add( (double)readPos );
} else if ( p.getBase() == alt ) {
altQuals.add( (double)readPos );
}
} }
} }
} }
protected void fillIndelQualsFromPileup(ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) { protected void fillIndelQualsFromPileup(ReadBackedPileup pileup, List<Double> refQuals, List<Double> altQuals) {
// equivalent is whether indel likelihoods for reads corresponding to ref allele are more likely than reads corresponding to alt allele // equivalent is whether indel likelihoods for reads corresponding to ref allele are more likely than reads corresponding to alt allele
// to classify a pileup element as ref or alt, we look at the likelihood associated with the allele associated to this element. // to classify a pileup element as ref or alt, we look at the likelihood associated with the allele associated to this element.
@ -52,18 +57,15 @@ public class ReadPosRankSumTest extends RankSumTest {
// To classify a pileup element as Ref or Alt, we look at the likelihood of corresponding alleles. // To classify a pileup element as Ref or Alt, we look at the likelihood of corresponding alleles.
// If likelihood of ref allele > highest likelihood of all alt alleles + epsilon, then this pielup element is "ref" // If likelihood of ref allele > highest likelihood of all alt alleles + epsilon, then this pielup element is "ref"
// otherwise if highest alt allele likelihood is > ref likelihood + epsilon, then this pileup element it "alt" // otherwise if highest alt allele likelihood is > ref likelihood + epsilon, then this pileup element it "alt"
final HashMap<PileupElement,LinkedHashMap<Allele,Double>> indelLikelihoodMap = IndelGenotypeLikelihoodsCalculationModel.getIndelLikelihoodMap(); final HashMap<PileupElement, LinkedHashMap<Allele, Double>> indelLikelihoodMap = IndelGenotypeLikelihoodsCalculationModel.getIndelLikelihoodMap();
for (final PileupElement p: pileup) { for (final PileupElement p : pileup) {
if (indelLikelihoodMap.containsKey(p)) { if (indelLikelihoodMap.containsKey(p)) {
// retrieve likelihood information corresponding to this read LinkedHashMap<Allele, Double> el = indelLikelihoodMap.get(p); // retrieve likelihood information corresponding to this read
LinkedHashMap<Allele,Double> el = indelLikelihoodMap.get(p); double refLikelihood = 0.0, altLikelihood = Double.NEGATIVE_INFINITY; // by design, first element in LinkedHashMap was ref allele
// by design, first element in LinkedHashMap was ref allele
double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
for (Allele a : el.keySet()) { for (Allele a : el.keySet()) {
if (a.isReference()) if (a.isReference())
refLikelihood =el.get(a); refLikelihood = el.get(a);
else { else {
double like = el.get(a); double like = el.get(a);
if (like >= altLikelihood) if (like >= altLikelihood)
@ -75,23 +77,22 @@ public class ReadPosRankSumTest extends RankSumTest {
final int numAlignedBases = getNumAlignedBases(p.getRead()); final int numAlignedBases = getNumAlignedBases(p.getRead());
int rp = readPos; int rp = readPos;
if( readPos > numAlignedBases / 2 ) { if (readPos > numAlignedBases / 2) {
readPos = numAlignedBases - ( readPos + 1 ); readPos = numAlignedBases - (readPos + 1);
} }
//if (DEBUG) System.out.format("R:%s start:%d C:%s offset:%d rp:%d readPos:%d alignedB:%d\n",p.getRead().getReadName(),p.getRead().getAlignmentStart(),p.getRead().getCigarString(),p.getOffset(), rp, readPos, numAlignedBases); //if (DEBUG) System.out.format("R:%s start:%d C:%s offset:%d rp:%d readPos:%d alignedB:%d\n",p.getRead().getReadName(),p.getRead().getAlignmentStart(),p.getRead().getCigarString(),p.getOffset(), rp, readPos, numAlignedBases);
// if event is beyond span of read just return and don't consider this element. This can happen, for example, with reads // if event is beyond span of read just return and don't consider this element. This can happen, for example, with reads
// where soft clipping still left strings of low quality bases but these are later removed by indel-specific clipping. // where soft clipping still left strings of low quality bases but these are later removed by indel-specific clipping.
// if (readPos < -1) // if (readPos < -1)
// return; // return;
if (refLikelihood > (altLikelihood + INDEL_LIKELIHOOD_THRESH)) { if (refLikelihood > (altLikelihood + INDEL_LIKELIHOOD_THRESH)) {
refQuals.add((double)readPos); refQuals.add((double) readPos);
//if (DEBUG) System.out.format("REF like: %4.1f, pos: %d\n",refLikelihood,readPos); //if (DEBUG) System.out.format("REF like: %4.1f, pos: %d\n",refLikelihood,readPos);
} } else if (altLikelihood > (refLikelihood + INDEL_LIKELIHOOD_THRESH)) {
else if (altLikelihood > (refLikelihood + INDEL_LIKELIHOOD_THRESH)) { altQuals.add((double) readPos);
altQuals.add((double)readPos); //if (DEBUG) System.out.format("ALT like: %4.1f, pos: %d\n",refLikelihood,readPos);
//if (DEBUG) System.out.format("ALT like: %4.1f, pos: %d\n",refLikelihood,readPos);
} }
@ -115,7 +116,7 @@ public class ReadPosRankSumTest extends RankSumTest {
// Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative,
// and may leave a string of Q2 bases still hanging off the reads. // and may leave a string of Q2 bases still hanging off the reads.
for (int i=numStartClippedBases; i < unclippedReadBases.length; i++) { for (int i = numStartClippedBases; i < unclippedReadBases.length; i++) {
if (unclippedReadQuals[i] < PairHMMIndelErrorModel.BASE_QUAL_THRESHOLD) if (unclippedReadQuals[i] < PairHMMIndelErrorModel.BASE_QUAL_THRESHOLD)
numStartClippedBases++; numStartClippedBases++;
else else
@ -134,7 +135,7 @@ public class ReadPosRankSumTest extends RankSumTest {
// compute total number of clipped bases (soft or hard clipped) // compute total number of clipped bases (soft or hard clipped)
// check for hard clips (never consider these bases): // check for hard clips (never consider these bases):
final Cigar c = read.getCigar(); final Cigar c = read.getCigar();
CigarElement last = c.getCigarElement(c.numCigarElements()-1); CigarElement last = c.getCigarElement(c.numCigarElements() - 1);
int numEndClippedBases = 0; int numEndClippedBases = 0;
if (last.getOperator() == CigarOperator.H) { if (last.getOperator() == CigarOperator.H) {
@ -145,7 +146,7 @@ public class ReadPosRankSumTest extends RankSumTest {
// Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative, // Do a stricter base clipping than provided by CIGAR string, since this one may be too conservative,
// and may leave a string of Q2 bases still hanging off the reads. // and may leave a string of Q2 bases still hanging off the reads.
for (int i=unclippedReadBases.length-numEndClippedBases-1; i >= 0; i-- ){ for (int i = unclippedReadBases.length - numEndClippedBases - 1; i >= 0; i--) {
if (unclippedReadQuals[i] < PairHMMIndelErrorModel.BASE_QUAL_THRESHOLD) if (unclippedReadQuals[i] < PairHMMIndelErrorModel.BASE_QUAL_THRESHOLD)
numEndClippedBases++; numEndClippedBases++;
else else
@ -157,8 +158,6 @@ public class ReadPosRankSumTest extends RankSumTest {
} }
int getOffsetFromClippedReadStart(SAMRecord read, int offset) { int getOffsetFromClippedReadStart(SAMRecord read, int offset) {
return offset - getNumClippedBasesAtStart(read);
return offset - getNumClippedBasesAtStart(read);
} }
} }

View File

@ -278,7 +278,7 @@ public class DiploidSNPGenotypeLikelihoods implements Cloneable {
if ( qual == 0 ) if ( qual == 0 )
return 0; return 0;
if ( elt.isReducedRead() ) { if ( elt.getRead().isReducedRead() ) {
// reduced read representation // reduced read representation
if ( BaseUtils.isRegularBase( obsBase )) { if ( BaseUtils.isRegularBase( obsBase )) {
int representativeCount = elt.getRepresentativeCount(); int representativeCount = elt.getRepresentativeCount();

View File

@ -60,14 +60,14 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
private final int maxAlternateAlleles; private final int maxAlternateAlleles;
private PairHMMIndelErrorModel pairModel; private PairHMMIndelErrorModel pairModel;
private static ThreadLocal<HashMap<PileupElement,LinkedHashMap<Allele,Double>>> indelLikelihoodMap = private static ThreadLocal<HashMap<PileupElement, LinkedHashMap<Allele, Double>>> indelLikelihoodMap =
new ThreadLocal<HashMap<PileupElement,LinkedHashMap<Allele,Double>>>() { new ThreadLocal<HashMap<PileupElement, LinkedHashMap<Allele, Double>>>() {
protected synchronized HashMap<PileupElement,LinkedHashMap<Allele,Double>> initialValue() { protected synchronized HashMap<PileupElement, LinkedHashMap<Allele, Double>> initialValue() {
return new HashMap<PileupElement,LinkedHashMap<Allele,Double>>(); return new HashMap<PileupElement, LinkedHashMap<Allele, Double>>();
} }
}; };
private LinkedHashMap<Allele,Haplotype> haplotypeMap; private LinkedHashMap<Allele, Haplotype> haplotypeMap;
// gdebug removeme // gdebug removeme
// todo -cleanup // todo -cleanup
@ -75,13 +75,13 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
private ArrayList<Allele> alleleList; private ArrayList<Allele> alleleList;
static { static {
indelLikelihoodMap.set(new HashMap<PileupElement,LinkedHashMap<Allele,Double>>()); indelLikelihoodMap.set(new HashMap<PileupElement, LinkedHashMap<Allele, Double>>());
} }
protected IndelGenotypeLikelihoodsCalculationModel(UnifiedArgumentCollection UAC, Logger logger) { protected IndelGenotypeLikelihoodsCalculationModel(UnifiedArgumentCollection UAC, Logger logger) {
super(UAC, logger); super(UAC, logger);
pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY,UAC.INDEL_GAP_CONTINUATION_PENALTY, pairModel = new PairHMMIndelErrorModel(UAC.INDEL_GAP_OPEN_PENALTY, UAC.INDEL_GAP_CONTINUATION_PENALTY,
UAC.OUTPUT_DEBUG_INDEL_INFO, !UAC.DONT_DO_BANDED_INDEL_COMPUTATION); UAC.OUTPUT_DEBUG_INDEL_INFO, !UAC.DONT_DO_BANDED_INDEL_COMPUTATION);
alleleList = new ArrayList<Allele>(); alleleList = new ArrayList<Allele>();
getAlleleListFromVCF = UAC.GenotypingMode == GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES; getAlleleListFromVCF = UAC.GenotypingMode == GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES;
@ -91,7 +91,7 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
maxAlternateAlleles = UAC.MAX_ALTERNATE_ALLELES; maxAlternateAlleles = UAC.MAX_ALTERNATE_ALLELES;
doMultiAllelicCalls = UAC.MULTI_ALLELIC; doMultiAllelicCalls = UAC.MULTI_ALLELIC;
haplotypeMap = new LinkedHashMap<Allele,Haplotype>(); haplotypeMap = new LinkedHashMap<Allele, Haplotype>();
ignoreSNPAllelesWhenGenotypingIndels = UAC.IGNORE_SNP_ALLELES; ignoreSNPAllelesWhenGenotypingIndels = UAC.IGNORE_SNP_ALLELES;
} }
@ -99,15 +99,15 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
private ArrayList<Allele> computeConsensusAlleles(ReferenceContext ref, private ArrayList<Allele> computeConsensusAlleles(ReferenceContext ref,
Map<String, AlignmentContext> contexts, Map<String, AlignmentContext> contexts,
AlignmentContextUtils.ReadOrientation contextType, GenomeLocParser locParser) { AlignmentContextUtils.ReadOrientation contextType, GenomeLocParser locParser) {
Allele refAllele=null, altAllele=null; Allele refAllele = null, altAllele = null;
GenomeLoc loc = ref.getLocus(); GenomeLoc loc = ref.getLocus();
ArrayList<Allele> aList = new ArrayList<Allele>(); ArrayList<Allele> aList = new ArrayList<Allele>();
HashMap<String,Integer> consensusIndelStrings = new HashMap<String,Integer>(); HashMap<String, Integer> consensusIndelStrings = new HashMap<String, Integer>();
int insCount = 0, delCount = 0; int insCount = 0, delCount = 0;
// quick check of total number of indels in pileup // quick check of total number of indels in pileup
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) { for (Map.Entry<String, AlignmentContext> sample : contexts.entrySet()) {
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType); AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
final ReadBackedExtendedEventPileup indelPileup = context.getExtendedEventPileup(); final ReadBackedExtendedEventPileup indelPileup = context.getExtendedEventPileup();
@ -118,21 +118,19 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
if (insCount < minIndelCountForGenotyping && delCount < minIndelCountForGenotyping) if (insCount < minIndelCountForGenotyping && delCount < minIndelCountForGenotyping)
return aList; return aList;
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) { for (Map.Entry<String, AlignmentContext> sample : contexts.entrySet()) {
// todo -- warning, can be duplicating expensive partition here // todo -- warning, can be duplicating expensive partition here
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType); AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
final ReadBackedExtendedEventPileup indelPileup = context.getExtendedEventPileup(); final ReadBackedExtendedEventPileup indelPileup = context.getExtendedEventPileup();
for (ExtendedEventPileupElement p : indelPileup.toExtendedIterable()) {
for ( ExtendedEventPileupElement p : indelPileup.toExtendedIterable() ) {
//SAMRecord read = p.getRead(); //SAMRecord read = p.getRead();
GATKSAMRecord read = ReadClipper.hardClipAdaptorSequence(p.getRead()); GATKSAMRecord read = ReadClipper.hardClipAdaptorSequence(p.getRead());
if (read == null) if (read == null)
continue; continue;
if(ReadUtils.is454Read(read)) { if (ReadUtils.is454Read(read)) {
continue; continue;
} }
@ -151,62 +149,57 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
// In this case, the read could have any of the inserted bases and we need to build a consensus // In this case, the read could have any of the inserted bases and we need to build a consensus
for (String s : consensusIndelStrings.keySet()) { for (String s : consensusIndelStrings.keySet()) {
int cnt = consensusIndelStrings.get(s); int cnt = consensusIndelStrings.get(s);
if (s.startsWith(indelString)){ if (s.startsWith(indelString)) {
// case 1: current insertion is prefix of indel in hash map // case 1: current insertion is prefix of indel in hash map
consensusIndelStrings.put(s,cnt+1); consensusIndelStrings.put(s, cnt + 1);
foundKey = true; foundKey = true;
break; break;
} } else if (indelString.startsWith(s)) {
else if (indelString.startsWith(s)) {
// case 2: indel stored in hash table is prefix of current insertion // case 2: indel stored in hash table is prefix of current insertion
// In this case, new bases are new key. // In this case, new bases are new key.
consensusIndelStrings.remove(s); consensusIndelStrings.remove(s);
consensusIndelStrings.put(indelString,cnt+1); consensusIndelStrings.put(indelString, cnt + 1);
foundKey = true; foundKey = true;
break; break;
} }
} }
if (!foundKey) if (!foundKey)
// none of the above: event bases not supported by previous table, so add new key // none of the above: event bases not supported by previous table, so add new key
consensusIndelStrings.put(indelString,1); consensusIndelStrings.put(indelString, 1);
} } else if (read.getAlignmentStart() == loc.getStart() + 1) {
else if (read.getAlignmentStart() == loc.getStart()+1) {
// opposite corner condition: read will start at current locus with an insertion // opposite corner condition: read will start at current locus with an insertion
for (String s : consensusIndelStrings.keySet()) { for (String s : consensusIndelStrings.keySet()) {
int cnt = consensusIndelStrings.get(s); int cnt = consensusIndelStrings.get(s);
if (s.endsWith(indelString)){ if (s.endsWith(indelString)) {
// case 1: current insertion is suffix of indel in hash map // case 1: current insertion is suffix of indel in hash map
consensusIndelStrings.put(s,cnt+1); consensusIndelStrings.put(s, cnt + 1);
foundKey = true; foundKey = true;
break; break;
} } else if (indelString.endsWith(s)) {
else if (indelString.endsWith(s)) {
// case 2: indel stored in hash table is suffix of current insertion // case 2: indel stored in hash table is suffix of current insertion
// In this case, new bases are new key. // In this case, new bases are new key.
consensusIndelStrings.remove(s); consensusIndelStrings.remove(s);
consensusIndelStrings.put(indelString,cnt+1); consensusIndelStrings.put(indelString, cnt + 1);
foundKey = true; foundKey = true;
break; break;
} }
} }
if (!foundKey) if (!foundKey)
// none of the above: event bases not supported by previous table, so add new key // none of the above: event bases not supported by previous table, so add new key
consensusIndelStrings.put(indelString,1); consensusIndelStrings.put(indelString, 1);
} } else {
else {
// normal case: insertion somewhere in the middle of a read: add count to hash map // normal case: insertion somewhere in the middle of a read: add count to hash map
int cnt = consensusIndelStrings.containsKey(indelString)? consensusIndelStrings.get(indelString):0; int cnt = consensusIndelStrings.containsKey(indelString) ? consensusIndelStrings.get(indelString) : 0;
consensusIndelStrings.put(indelString,cnt+1); consensusIndelStrings.put(indelString, cnt + 1);
} }
} } else if (p.isDeletion()) {
else if (p.isDeletion()) { indelString = String.format("D%d", p.getEventLength());
indelString = String.format("D%d",p.getEventLength()); int cnt = consensusIndelStrings.containsKey(indelString) ? consensusIndelStrings.get(indelString) : 0;
int cnt = consensusIndelStrings.containsKey(indelString)? consensusIndelStrings.get(indelString):0; consensusIndelStrings.put(indelString, cnt + 1);
consensusIndelStrings.put(indelString,cnt+1);
} }
} }
@ -227,18 +220,17 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
// get deletion length // get deletion length
int dLen = Integer.valueOf(s.substring(1)); int dLen = Integer.valueOf(s.substring(1));
// get ref bases of accurate deletion // get ref bases of accurate deletion
int startIdxInReference = 1+loc.getStart()-ref.getWindow().getStart(); int startIdxInReference = 1 + loc.getStart() - ref.getWindow().getStart();
stop = loc.getStart() + dLen; stop = loc.getStart() + dLen;
byte[] refBases = Arrays.copyOfRange(ref.getBases(),startIdxInReference,startIdxInReference+dLen); byte[] refBases = Arrays.copyOfRange(ref.getBases(), startIdxInReference, startIdxInReference + dLen);
if (Allele.acceptableAlleleBases(refBases)) { if (Allele.acceptableAlleleBases(refBases)) {
refAllele = Allele.create(refBases,true); refAllele = Allele.create(refBases, true);
altAllele = Allele.create(Allele.NULL_ALLELE_STRING, false); altAllele = Allele.create(Allele.NULL_ALLELE_STRING, false);
} }
} } else {
else {
// insertion case // insertion case
if (Allele.acceptableAlleleBases(s)) { if (Allele.acceptableAlleleBases(s)) {
refAllele = Allele.create(Allele.NULL_ALLELE_STRING, true); refAllele = Allele.create(Allele.NULL_ALLELE_STRING, true);
altAllele = Allele.create(s, false); altAllele = Allele.create(s, false);
stop = loc.getStart(); stop = loc.getStart();
@ -288,7 +280,7 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
Allele alternateAlleleToUse, Allele alternateAlleleToUse,
boolean useBAQedPileup, GenomeLocParser locParser) { boolean useBAQedPileup, GenomeLocParser locParser) {
if ( tracker == null ) if (tracker == null)
return null; return null;
GenomeLoc loc = ref.getLocus(); GenomeLoc loc = ref.getLocus();
@ -299,12 +291,12 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
// starting a new site: clear allele list // starting a new site: clear allele list
alleleList.clear(); alleleList.clear();
lastSiteVisited = ref.getLocus(); lastSiteVisited = ref.getLocus();
indelLikelihoodMap.set(new HashMap<PileupElement,LinkedHashMap<Allele,Double>>()); indelLikelihoodMap.set(new HashMap<PileupElement, LinkedHashMap<Allele, Double>>());
haplotypeMap.clear(); haplotypeMap.clear();
if (getAlleleListFromVCF) { if (getAlleleListFromVCF) {
for( final VariantContext vc_input : tracker.getValues(UAC.alleles, loc) ) { for (final VariantContext vc_input : tracker.getValues(UAC.alleles, loc)) {
if( vc_input != null && if (vc_input != null &&
allowableTypes.contains(vc_input.getType()) && allowableTypes.contains(vc_input.getType()) &&
ref.getLocus().getStart() == vc_input.getStart()) { ref.getLocus().getStart() == vc_input.getStart()) {
vc = vc_input; vc = vc_input;
@ -312,7 +304,7 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
} }
} }
// ignore places where we don't have a variant // ignore places where we don't have a variant
if ( vc == null ) if (vc == null)
return null; return null;
alleleList.clear(); alleleList.clear();
@ -324,15 +316,13 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
else else
alleleList.add(a); alleleList.add(a);
} } else {
else {
for (Allele a : vc.getAlleles()) for (Allele a : vc.getAlleles())
alleleList.add(a); alleleList.add(a);
} }
} } else {
else { alleleList = computeConsensusAlleles(ref, contexts, contextType, locParser);
alleleList = computeConsensusAlleles(ref,contexts, contextType, locParser);
if (alleleList.isEmpty()) if (alleleList.isEmpty())
return null; return null;
} }
@ -342,9 +332,9 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
return null; return null;
// check if there is enough reference window to create haplotypes (can be an issue at end of contigs) // check if there is enough reference window to create haplotypes (can be an issue at end of contigs)
if (ref.getWindow().getStop() < loc.getStop()+HAPLOTYPE_SIZE) if (ref.getWindow().getStop() < loc.getStop() + HAPLOTYPE_SIZE)
return null; return null;
if ( !(priors instanceof DiploidIndelGenotypePriors) ) if (!(priors instanceof DiploidIndelGenotypePriors))
throw new StingException("Only diploid-based Indel priors are supported in the DINDEL GL model"); throw new StingException("Only diploid-based Indel priors are supported in the DINDEL GL model");
if (alleleList.isEmpty()) if (alleleList.isEmpty())
@ -355,8 +345,8 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
// look for alt allele that has biggest length distance to ref allele // look for alt allele that has biggest length distance to ref allele
int maxLenDiff = 0; int maxLenDiff = 0;
for (Allele a: alleleList) { for (Allele a : alleleList) {
if(a.isNonReference()) { if (a.isNonReference()) {
int lenDiff = Math.abs(a.getBaseString().length() - refAllele.getBaseString().length()); int lenDiff = Math.abs(a.getBaseString().length() - refAllele.getBaseString().length());
if (lenDiff > maxLenDiff) { if (lenDiff > maxLenDiff) {
maxLenDiff = lenDiff; maxLenDiff = lenDiff;
@ -366,11 +356,11 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
} }
final int eventLength = altAllele.getBaseString().length() - refAllele.getBaseString().length(); final int eventLength = altAllele.getBaseString().length() - refAllele.getBaseString().length();
final int hsize = (int)ref.getWindow().size()-Math.abs(eventLength)-1; final int hsize = (int) ref.getWindow().size() - Math.abs(eventLength) - 1;
final int numPrefBases= ref.getLocus().getStart()-ref.getWindow().getStart()+1; final int numPrefBases = ref.getLocus().getStart() - ref.getWindow().getStart() + 1;
if (hsize <=0) { if (hsize <= 0) {
logger.warn(String.format("Warning: event at location %s can't be genotyped, skipping",loc.toString())); logger.warn(String.format("Warning: event at location %s can't be genotyped, skipping", loc.toString()));
return null; return null;
} }
haplotypeMap = Haplotype.makeHaplotypeListFromAlleles(alleleList, loc.getStart(), haplotypeMap = Haplotype.makeHaplotypeListFromAlleles(alleleList, loc.getStart(),
@ -388,7 +378,7 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
// For each sample, get genotype likelihoods based on pileup // For each sample, get genotype likelihoods based on pileup
// compute prior likelihoods on haplotypes, and initialize haplotype likelihood matrix with them. // compute prior likelihoods on haplotypes, and initialize haplotype likelihood matrix with them.
for ( Map.Entry<String, AlignmentContext> sample : contexts.entrySet() ) { for (Map.Entry<String, AlignmentContext> sample : contexts.entrySet()) {
AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType); AlignmentContext context = AlignmentContextUtils.stratify(sample.getValue(), contextType);
ReadBackedPileup pileup = null; ReadBackedPileup pileup = null;
@ -397,8 +387,8 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
else if (context.hasBasePileup()) else if (context.hasBasePileup())
pileup = context.getBasePileup(); pileup = context.getBasePileup();
if (pileup != null ) { if (pileup != null) {
final double[] genotypeLikelihoods = pairModel.computeReadHaplotypeLikelihoods( pileup, haplotypeMap, ref, eventLength, getIndelLikelihoodMap()); final double[] genotypeLikelihoods = pairModel.computeReadHaplotypeLikelihoods(pileup, haplotypeMap, ref, eventLength, getIndelLikelihoodMap());
GenotypeLikelihoods likelihoods = GenotypeLikelihoods.fromLog10Likelihoods(genotypeLikelihoods); GenotypeLikelihoods likelihoods = GenotypeLikelihoods.fromLog10Likelihoods(genotypeLikelihoods);
HashMap<String, Object> attributes = new HashMap<String, Object>(); HashMap<String, Object> attributes = new HashMap<String, Object>();
@ -407,9 +397,9 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
genotypes.add(new Genotype(sample.getKey(), noCall, Genotype.NO_LOG10_PERROR, null, attributes, false)); genotypes.add(new Genotype(sample.getKey(), noCall, Genotype.NO_LOG10_PERROR, null, attributes, false));
if (DEBUG) { if (DEBUG) {
System.out.format("Sample:%s Alleles:%s GL:",sample.getKey(), alleleList.toString()); System.out.format("Sample:%s Alleles:%s GL:", sample.getKey(), alleleList.toString());
for (int k=0; k < genotypeLikelihoods.length; k++) for (int k = 0; k < genotypeLikelihoods.length; k++)
System.out.format("%1.4f ",genotypeLikelihoods[k]); System.out.format("%1.4f ", genotypeLikelihoods[k]);
System.out.println(); System.out.println();
} }
} }
@ -421,21 +411,21 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
private int calculateEndPos(Collection<Allele> alleles, Allele refAllele, GenomeLoc loc) { private int calculateEndPos(Collection<Allele> alleles, Allele refAllele, GenomeLoc loc) {
// for indels, stop location is one more than ref allele length // for indels, stop location is one more than ref allele length
boolean hasNullAltAllele = false; boolean hasNullAltAllele = false;
for ( Allele a : alleles ) { for (Allele a : alleles) {
if ( a.isNull() ) { if (a.isNull()) {
hasNullAltAllele = true; hasNullAltAllele = true;
break; break;
} }
} }
int endLoc = loc.getStart() + refAllele.length(); int endLoc = loc.getStart() + refAllele.length();
if( !hasNullAltAllele ) if (!hasNullAltAllele)
endLoc--; endLoc--;
return endLoc; return endLoc;
} }
public static HashMap<PileupElement,LinkedHashMap<Allele,Double>> getIndelLikelihoodMap() { public static HashMap<PileupElement, LinkedHashMap<Allele, Double>> getIndelLikelihoodMap() {
return indelLikelihoodMap.get(); return indelLikelihoodMap.get();
} }
@ -443,8 +433,8 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
// so that per-sample DP will include deletions covering the event. // so that per-sample DP will include deletions covering the event.
protected int getFilteredDepth(ReadBackedPileup pileup) { protected int getFilteredDepth(ReadBackedPileup pileup) {
int count = 0; int count = 0;
for ( PileupElement p : pileup ) { for (PileupElement p : pileup) {
if (p.isDeletion() || BaseUtils.isRegularBase(p.getBase()) ) if (p.isDeletion() || p.isInsertionAtBeginningOfRead() || BaseUtils.isRegularBase(p.getBase()))
count++; count++;
} }

View File

@ -212,7 +212,7 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC
public class BAQedPileupElement extends PileupElement { public class BAQedPileupElement extends PileupElement {
public BAQedPileupElement( final PileupElement PE ) { public BAQedPileupElement( final PileupElement PE ) {
super(PE.getRead(), PE.getOffset()); super(PE.getRead(), PE.getOffset(), PE.isDeletion());
} }
@Override @Override

View File

@ -40,7 +40,7 @@ import java.util.*;
* @author mhanna * @author mhanna
* @version 0.1 * @version 0.1
*/ */
public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPileup<RBP,PE>,PE extends PileupElement> implements ReadBackedPileup { public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPileup<RBP, PE>, PE extends PileupElement> implements ReadBackedPileup {
protected final GenomeLoc loc; protected final GenomeLoc loc;
protected final PileupElementTracker<PE> pileupElementTracker; protected final PileupElementTracker<PE> pileupElementTracker;
@ -55,23 +55,18 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
* reads[i] for all i in offsets. Does not make a copy of the data, so it's not safe to * reads[i] for all i in offsets. Does not make a copy of the data, so it's not safe to
* go changing the reads. * go changing the reads.
* *
* @param loc * @param loc The genome loc to associate reads wotj
* @param reads * @param reads
* @param offsets * @param offsets
*/ */
public AbstractReadBackedPileup(GenomeLoc loc, List<GATKSAMRecord> reads, List<Integer> offsets ) { public AbstractReadBackedPileup(GenomeLoc loc, List<GATKSAMRecord> reads, List<Integer> offsets) {
this.loc = loc; this.loc = loc;
this.pileupElementTracker = readsOffsets2Pileup(reads,offsets); this.pileupElementTracker = readsOffsets2Pileup(reads, offsets);
} }
public AbstractReadBackedPileup(GenomeLoc loc, List<GATKSAMRecord> reads, int offset ) {
this.loc = loc;
this.pileupElementTracker = readsOffsets2Pileup(reads,offset);
}
/** /**
* Create a new version of a read backed pileup at loc without any aligned reads * Create a new version of a read backed pileup at loc without any aligned reads
*
*/ */
public AbstractReadBackedPileup(GenomeLoc loc) { public AbstractReadBackedPileup(GenomeLoc loc) {
this(loc, new UnifiedPileupElementTracker<PE>()); this(loc, new UnifiedPileupElementTracker<PE>());
@ -81,11 +76,10 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
* Create a new version of a read backed pileup at loc, using the reads and their corresponding * Create a new version of a read backed pileup at loc, using the reads and their corresponding
* offsets. This lower level constructure assumes pileup is well-formed and merely keeps a * offsets. This lower level constructure assumes pileup is well-formed and merely keeps a
* pointer to pileup. Don't go changing the data in pileup. * pointer to pileup. Don't go changing the data in pileup.
*
*/ */
public AbstractReadBackedPileup(GenomeLoc loc, List<PE> pileup) { public AbstractReadBackedPileup(GenomeLoc loc, List<PE> pileup) {
if ( loc == null ) throw new ReviewedStingException("Illegal null genomeloc in ReadBackedPileup"); if (loc == null) throw new ReviewedStingException("Illegal null genomeloc in ReadBackedPileup");
if ( pileup == null ) throw new ReviewedStingException("Illegal null pileup in ReadBackedPileup"); if (pileup == null) throw new ReviewedStingException("Illegal null pileup in ReadBackedPileup");
this.loc = loc; this.loc = loc;
this.pileupElementTracker = new UnifiedPileupElementTracker<PE>(pileup); this.pileupElementTracker = new UnifiedPileupElementTracker<PE>(pileup);
@ -94,12 +88,13 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
/** /**
* Optimization of above constructor where all of the cached data is provided * Optimization of above constructor where all of the cached data is provided
*
* @param loc * @param loc
* @param pileup * @param pileup
*/ */
public AbstractReadBackedPileup(GenomeLoc loc, List<PE> pileup, int size, int nDeletions, int nMQ0Reads) { public AbstractReadBackedPileup(GenomeLoc loc, List<PE> pileup, int size, int nDeletions, int nMQ0Reads) {
if ( loc == null ) throw new ReviewedStingException("Illegal null genomeloc in UnifiedReadBackedPileup"); if (loc == null) throw new ReviewedStingException("Illegal null genomeloc in UnifiedReadBackedPileup");
if ( pileup == null ) throw new ReviewedStingException("Illegal null pileup in UnifiedReadBackedPileup"); if (pileup == null) throw new ReviewedStingException("Illegal null pileup in UnifiedReadBackedPileup");
this.loc = loc; this.loc = loc;
this.pileupElementTracker = new UnifiedPileupElementTracker<PE>(pileup); this.pileupElementTracker = new UnifiedPileupElementTracker<PE>(pileup);
@ -115,16 +110,21 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
calculateCachedData(); calculateCachedData();
} }
protected AbstractReadBackedPileup(GenomeLoc loc, Map<String,? extends AbstractReadBackedPileup<RBP,PE>> pileupsBySample) { protected AbstractReadBackedPileup(GenomeLoc loc, Map<String, ? extends AbstractReadBackedPileup<RBP, PE>> pileupsBySample) {
this.loc = loc; this.loc = loc;
PerSamplePileupElementTracker<PE> tracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> tracker = new PerSamplePileupElementTracker<PE>();
for(Map.Entry<String,? extends AbstractReadBackedPileup<RBP,PE>> pileupEntry: pileupsBySample.entrySet()) { for (Map.Entry<String, ? extends AbstractReadBackedPileup<RBP, PE>> pileupEntry : pileupsBySample.entrySet()) {
tracker.addElements(pileupEntry.getKey(),pileupEntry.getValue().pileupElementTracker); tracker.addElements(pileupEntry.getKey(), pileupEntry.getValue().pileupElementTracker);
addPileupToCumulativeStats(pileupEntry.getValue()); addPileupToCumulativeStats(pileupEntry.getValue());
} }
this.pileupElementTracker = tracker; this.pileupElementTracker = tracker;
} }
public AbstractReadBackedPileup(GenomeLoc loc, List<GATKSAMRecord> reads, int offset) {
this.loc = loc;
this.pileupElementTracker = readsOffsets2Pileup(reads, offset);
}
/** /**
* Calculate cached sizes, nDeletion, and base counts for the pileup. This calculation is done upfront, * Calculate cached sizes, nDeletion, and base counts for the pileup. This calculation is done upfront,
* so you pay the cost at the start, but it's more efficient to do this rather than pay the cost of calling * so you pay the cost at the start, but it's more efficient to do this rather than pay the cost of calling
@ -135,12 +135,12 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
nDeletions = 0; nDeletions = 0;
nMQ0Reads = 0; nMQ0Reads = 0;
for ( PileupElement p : pileupElementTracker ) { for (PileupElement p : pileupElementTracker) {
size++; size++;
if ( p.isDeletion() ) { if (p.isDeletion()) {
nDeletions++; nDeletions++;
} }
if ( p.getRead().getMappingQuality() == 0 ) { if (p.getRead().getMappingQuality() == 0) {
nMQ0Reads++; nMQ0Reads++;
} }
} }
@ -148,12 +148,12 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
protected void calculateAbstractSize() { protected void calculateAbstractSize() {
abstractSize = 0; abstractSize = 0;
for ( PileupElement p : pileupElementTracker ) { for (PileupElement p : pileupElementTracker) {
abstractSize += p.getRepresentativeCount(); abstractSize += p.getRepresentativeCount();
} }
} }
protected void addPileupToCumulativeStats(AbstractReadBackedPileup<RBP,PE> pileup) { protected void addPileupToCumulativeStats(AbstractReadBackedPileup<RBP, PE> pileup) {
size += pileup.getNumberOfElements(); size += pileup.getNumberOfElements();
abstractSize += pileup.depthOfCoverage(); abstractSize += pileup.depthOfCoverage();
nDeletions += pileup.getNumberOfDeletions(); nDeletions += pileup.getNumberOfDeletions();
@ -167,14 +167,17 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
* @param offsets * @param offsets
* @return * @return
*/ */
private PileupElementTracker<PE> readsOffsets2Pileup(List<GATKSAMRecord> reads, List<Integer> offsets ) { private PileupElementTracker<PE> readsOffsets2Pileup(List<GATKSAMRecord> reads, List<Integer> offsets) {
if ( reads == null ) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup"); if (reads == null) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup");
if ( offsets == null ) throw new ReviewedStingException("Illegal null offsets list in UnifiedReadBackedPileup"); if (offsets == null) throw new ReviewedStingException("Illegal null offsets list in UnifiedReadBackedPileup");
if ( reads.size() != offsets.size() ) throw new ReviewedStingException("Reads and offset lists have different sizes!"); if (reads.size() != offsets.size())
throw new ReviewedStingException("Reads and offset lists have different sizes!");
UnifiedPileupElementTracker<PE> pileup = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> pileup = new UnifiedPileupElementTracker<PE>();
for ( int i = 0; i < reads.size(); i++ ) { for (int i = 0; i < reads.size(); i++) {
pileup.add(createNewPileupElement(reads.get(i),offsets.get(i))); GATKSAMRecord read = reads.get(i);
int offset = offsets.get(i);
pileup.add(createNewPileupElement(read, offset, BaseUtils.simpleBaseToBaseIndex(read.getReadBases()[offset]) == BaseUtils.D));
} }
return pileup; return pileup;
@ -187,20 +190,21 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
* @param offset * @param offset
* @return * @return
*/ */
private PileupElementTracker<PE> readsOffsets2Pileup(List<GATKSAMRecord> reads, int offset ) { private PileupElementTracker<PE> readsOffsets2Pileup(List<GATKSAMRecord> reads, int offset) {
if ( reads == null ) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup"); if (reads == null) throw new ReviewedStingException("Illegal null read list in UnifiedReadBackedPileup");
if ( offset < 0 ) throw new ReviewedStingException("Illegal offset < 0 UnifiedReadBackedPileup"); if (offset < 0) throw new ReviewedStingException("Illegal offset < 0 UnifiedReadBackedPileup");
UnifiedPileupElementTracker<PE> pileup = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> pileup = new UnifiedPileupElementTracker<PE>();
for ( int i = 0; i < reads.size(); i++ ) { for (GATKSAMRecord read : reads) {
pileup.add(createNewPileupElement( reads.get(i), offset )); pileup.add(createNewPileupElement(read, offset, BaseUtils.simpleBaseToBaseIndex(read.getReadBases()[offset]) == BaseUtils.D));
} }
return pileup; return pileup;
} }
protected abstract AbstractReadBackedPileup<RBP,PE> createNewPileup(GenomeLoc loc, PileupElementTracker<PE> pileupElementTracker); protected abstract AbstractReadBackedPileup<RBP, PE> createNewPileup(GenomeLoc loc, PileupElementTracker<PE> pileupElementTracker);
protected abstract PE createNewPileupElement(GATKSAMRecord read, int offset);
protected abstract PE createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion);
// -------------------------------------------------------- // --------------------------------------------------------
// //
@ -217,32 +221,31 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
*/ */
@Override @Override
public RBP getPileupWithoutDeletions() { public RBP getPileupWithoutDeletions() {
if ( getNumberOfDeletions() > 0 ) { if (getNumberOfDeletions() > 0) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getPileupWithoutDeletions(); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getPileupWithoutDeletions();
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else { UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>) pileupElementTracker;
UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>)pileupElementTracker;
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for ( PE p : tracker ) { for (PE p : tracker) {
if ( !p.isDeletion() ) { if (!p.isDeletion()) {
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return (RBP)createNewPileup(loc, filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} else { } else {
return (RBP)this; return (RBP) this;
} }
} }
@ -256,21 +259,20 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
*/ */
@Override @Override
public RBP getOverlappingFragmentFilteredPileup() { public RBP getOverlappingFragmentFilteredPileup() {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getOverlappingFragmentFilteredPileup(); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getOverlappingFragmentFilteredPileup();
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else { Map<String, PE> filteredPileup = new HashMap<String, PE>();
Map<String,PE> filteredPileup = new HashMap<String, PE>();
for ( PE p : pileupElementTracker ) { for (PE p : pileupElementTracker) {
String readName = p.getRead().getReadName(); String readName = p.getRead().getReadName();
// if we've never seen this read before, life is good // if we've never seen this read before, life is good
@ -292,10 +294,10 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
} }
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for(PE filteredElement: filteredPileup.values()) for (PE filteredElement : filteredPileup.values())
filteredTracker.add(filteredElement); filteredTracker.add(filteredElement);
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} }
@ -309,300 +311,299 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
*/ */
@Override @Override
public RBP getPileupWithoutMappingQualityZeroReads() { public RBP getPileupWithoutMappingQualityZeroReads() {
if ( getNumberOfMappingQualityZeroReads() > 0 ) { if (getNumberOfMappingQualityZeroReads() > 0) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getPileupWithoutMappingQualityZeroReads(); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getPileupWithoutMappingQualityZeroReads();
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else { UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>) pileupElementTracker;
UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>)pileupElementTracker;
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for ( PE p : tracker ) { for (PE p : tracker) {
if ( p.getRead().getMappingQuality() > 0 ) { if (p.getRead().getMappingQuality() > 0) {
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return (RBP)createNewPileup(loc, filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} else { } else {
return (RBP)this; return (RBP) this;
} }
} }
public RBP getPositiveStrandPileup() { public RBP getPositiveStrandPileup() {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getPositiveStrandPileup(); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getPositiveStrandPileup();
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else { UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>) pileupElementTracker;
UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>)pileupElementTracker;
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for ( PE p : tracker ) { for (PE p : tracker) {
if ( !p.getRead().getReadNegativeStrandFlag() ) { if (!p.getRead().getReadNegativeStrandFlag()) {
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return (RBP)createNewPileup(loc, filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} }
/** /**
* Gets the pileup consisting of only reads on the negative strand. * Gets the pileup consisting of only reads on the negative strand.
*
* @return A read-backed pileup consisting only of reads on the negative strand. * @return A read-backed pileup consisting only of reads on the negative strand.
*/ */
public RBP getNegativeStrandPileup() { public RBP getNegativeStrandPileup() {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getNegativeStrandPileup(); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getNegativeStrandPileup();
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else { UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>) pileupElementTracker;
UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>)pileupElementTracker;
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for ( PE p : tracker ) { for (PE p : tracker) {
if ( p.getRead().getReadNegativeStrandFlag() ) { if (p.getRead().getReadNegativeStrandFlag()) {
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return (RBP)createNewPileup(loc, filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} }
/** /**
* Gets a pileup consisting of all those elements passed by a given filter. * Gets a pileup consisting of all those elements passed by a given filter.
*
* @param filter Filter to use when testing for elements. * @param filter Filter to use when testing for elements.
* @return a pileup without the given filtered elements. * @return a pileup without the given filtered elements.
*/ */
public RBP getFilteredPileup(PileupElementFilter filter) { public RBP getFilteredPileup(PileupElementFilter filter) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getFilteredPileup(filter); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getFilteredPileup(filter);
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else {
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for ( PE p : pileupElementTracker ) { for (PE p : pileupElementTracker) {
if( filter.allow(p) ) if (filter.allow(p))
filteredTracker.add(p); filteredTracker.add(p);
} }
return (RBP)createNewPileup(loc, filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} }
/** Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from /**
* Returns subset of this pileup that contains only bases with quality >= minBaseQ, coming from
* reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup. * reads with mapping qualities >= minMapQ. This method allocates and returns a new instance of ReadBackedPileup.
*
* @param minBaseQ * @param minBaseQ
* @param minMapQ * @param minMapQ
* @return * @return
*/ */
@Override @Override
public RBP getBaseAndMappingFilteredPileup( int minBaseQ, int minMapQ ) { public RBP getBaseAndMappingFilteredPileup(int minBaseQ, int minMapQ) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getBaseAndMappingFilteredPileup(minBaseQ,minMapQ); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getBaseAndMappingFilteredPileup(minBaseQ, minMapQ);
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else {
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for ( PE p : pileupElementTracker ) { for (PE p : pileupElementTracker) {
if ( p.getRead().getMappingQuality() >= minMapQ && if (p.getRead().getMappingQuality() >= minMapQ &&
(p.isDeletion() || (p.isDeletion() ||
((p instanceof ExtendedEventPileupElement) && ((ExtendedEventPileupElement)p).getType() == ExtendedEventPileupElement.Type.NOEVENT) || ((p instanceof ExtendedEventPileupElement) && ((ExtendedEventPileupElement) p).getType() == ExtendedEventPileupElement.Type.NOEVENT) ||
p.getQual() >= minBaseQ) ) { p.getQual() >= minBaseQ)) {
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return (RBP)createNewPileup(loc, filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} }
} }
/** Returns subset of this pileup that contains only bases with quality >= minBaseQ. /**
* Returns subset of this pileup that contains only bases with quality >= minBaseQ.
* This method allocates and returns a new instance of ReadBackedPileup. * This method allocates and returns a new instance of ReadBackedPileup.
*
* @param minBaseQ * @param minBaseQ
* @return * @return
*/ */
@Override @Override
public RBP getBaseFilteredPileup( int minBaseQ ) { public RBP getBaseFilteredPileup(int minBaseQ) {
return getBaseAndMappingFilteredPileup(minBaseQ, -1); return getBaseAndMappingFilteredPileup(minBaseQ, -1);
} }
/** Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ. /**
* Returns subset of this pileup that contains only bases coming from reads with mapping quality >= minMapQ.
* This method allocates and returns a new instance of ReadBackedPileup. * This method allocates and returns a new instance of ReadBackedPileup.
*
* @param minMapQ * @param minMapQ
* @return * @return
*/ */
@Override @Override
public RBP getMappingFilteredPileup( int minMapQ ) { public RBP getMappingFilteredPileup(int minMapQ) {
return getBaseAndMappingFilteredPileup(-1, minMapQ); return getBaseAndMappingFilteredPileup(-1, minMapQ);
} }
/** /**
* Gets a list of the read groups represented in this pileup. * Gets a list of the read groups represented in this pileup.
*
* @return * @return
*/ */
@Override @Override
public Collection<String> getReadGroups() { public Collection<String> getReadGroups() {
Set<String> readGroups = new HashSet<String>(); Set<String> readGroups = new HashSet<String>();
for(PileupElement pileupElement: this) for (PileupElement pileupElement : this)
readGroups.add(pileupElement.getRead().getReadGroup().getReadGroupId()); readGroups.add(pileupElement.getRead().getReadGroup().getReadGroupId());
return readGroups; return readGroups;
} }
/** /**
* Gets the pileup for a given read group. Horrendously inefficient at this point. * Gets the pileup for a given read group. Horrendously inefficient at this point.
*
* @param targetReadGroupId Identifier for the read group. * @param targetReadGroupId Identifier for the read group.
* @return A read-backed pileup containing only the reads in the given read group. * @return A read-backed pileup containing only the reads in the given read group.
*/ */
@Override @Override
public RBP getPileupForReadGroup(String targetReadGroupId) { public RBP getPileupForReadGroup(String targetReadGroupId) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getPileupForReadGroup(targetReadGroupId); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroup(targetReadGroupId);
if(pileup != null) if (pileup != null)
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} } else {
else {
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for(PE p: pileupElementTracker) { for (PE p : pileupElementTracker) {
GATKSAMRecord read = p.getRead(); GATKSAMRecord read = p.getRead();
if(targetReadGroupId != null) { if (targetReadGroupId != null) {
if(read.getReadGroup() != null && targetReadGroupId.equals(read.getReadGroup().getReadGroupId())) if (read.getReadGroup() != null && targetReadGroupId.equals(read.getReadGroup().getReadGroupId()))
filteredTracker.add(p); filteredTracker.add(p);
} } else {
else { if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
if(read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} }
} }
/** /**
* Gets the pileup for a set of read groups. Horrendously inefficient at this point. * Gets the pileup for a set of read groups. Horrendously inefficient at this point.
*
* @param rgSet List of identifiers for the read groups. * @param rgSet List of identifiers for the read groups.
* @return A read-backed pileup containing only the reads in the given read groups. * @return A read-backed pileup containing only the reads in the given read groups.
*/ */
@Override @Override
public RBP getPileupForReadGroups(final HashSet<String> rgSet) { public RBP getPileupForReadGroups(final HashSet<String> rgSet) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getPileupForReadGroups(rgSet); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getPileupForReadGroups(rgSet);
if(pileup != null) if (pileup != null)
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} } else {
else {
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for(PE p: pileupElementTracker) { for (PE p : pileupElementTracker) {
GATKSAMRecord read = p.getRead(); GATKSAMRecord read = p.getRead();
if(rgSet != null && !rgSet.isEmpty()) { if (rgSet != null && !rgSet.isEmpty()) {
if(read.getReadGroup() != null && rgSet.contains(read.getReadGroup().getReadGroupId())) if (read.getReadGroup() != null && rgSet.contains(read.getReadGroup().getReadGroupId()))
filteredTracker.add(p); filteredTracker.add(p);
} } else {
else { if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
if(read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} }
} }
@Override @Override
public RBP getPileupForLane(String laneID) { public RBP getPileupForLane(String laneID) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements).getPileupForLane(laneID); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements).getPileupForLane(laneID);
if(pileup != null) if (pileup != null)
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} } else {
else {
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for(PE p: pileupElementTracker) { for (PE p : pileupElementTracker) {
GATKSAMRecord read = p.getRead(); GATKSAMRecord read = p.getRead();
if(laneID != null) { if (laneID != null) {
if(read.getReadGroup() != null && if (read.getReadGroup() != null &&
(read.getReadGroup().getReadGroupId().startsWith(laneID + ".")) || // lane is the same, but sample identifier is different (read.getReadGroup().getReadGroupId().startsWith(laneID + ".")) || // lane is the same, but sample identifier is different
(read.getReadGroup().getReadGroupId().equals(laneID))) // in case there is no sample identifier, they have to be exactly the same (read.getReadGroup().getReadGroupId().equals(laneID))) // in case there is no sample identifier, they have to be exactly the same
filteredTracker.add(p); filteredTracker.add(p);
} } else {
else { if (read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
if(read.getReadGroup() == null || read.getReadGroup().getReadGroupId() == null)
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} }
} }
public Collection<String> getSamples() { public Collection<String> getSamples() {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
return new HashSet<String>(tracker.getSamples()); return new HashSet<String>(tracker.getSamples());
} } else {
else {
Collection<String> sampleNames = new HashSet<String>(); Collection<String> sampleNames = new HashSet<String>();
for(PileupElement p: this) { for (PileupElement p : this) {
GATKSAMRecord read = p.getRead(); GATKSAMRecord read = p.getRead();
String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null; String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
sampleNames.add(sampleName); sampleNames.add(sampleName);
@ -619,103 +620,98 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
*/ */
@Override @Override
public RBP getDownsampledPileup(int desiredCoverage) { public RBP getDownsampledPileup(int desiredCoverage) {
if ( getNumberOfElements() <= desiredCoverage ) if (getNumberOfElements() <= desiredCoverage)
return (RBP)this; return (RBP) this;
// randomly choose numbers corresponding to positions in the reads list // randomly choose numbers corresponding to positions in the reads list
TreeSet<Integer> positions = new TreeSet<Integer>(); TreeSet<Integer> positions = new TreeSet<Integer>();
for ( int i = 0; i < desiredCoverage; /* no update */ ) { for (int i = 0; i < desiredCoverage; /* no update */) {
if ( positions.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(size)) ) if (positions.add(GenomeAnalysisEngine.getRandomGenerator().nextInt(size)))
i++; i++;
} }
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>(); PerSamplePileupElementTracker<PE> filteredTracker = new PerSamplePileupElementTracker<PE>();
int current = 0; int current = 0;
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
PileupElementTracker<PE> perSampleElements = tracker.getElements(sample); PileupElementTracker<PE> perSampleElements = tracker.getElements(sample);
List<PileupElement> filteredPileup = new ArrayList<PileupElement>(); List<PileupElement> filteredPileup = new ArrayList<PileupElement>();
for(PileupElement p: perSampleElements) { for (PileupElement p : perSampleElements) {
if(positions.contains(current)) if (positions.contains(current))
filteredPileup.add(p); filteredPileup.add(p);
} }
if(!filteredPileup.isEmpty()) { if (!filteredPileup.isEmpty()) {
AbstractReadBackedPileup<RBP,PE> pileup = createNewPileup(loc,perSampleElements); AbstractReadBackedPileup<RBP, PE> pileup = createNewPileup(loc, perSampleElements);
filteredTracker.addElements(sample,pileup.pileupElementTracker); filteredTracker.addElements(sample, pileup.pileupElementTracker);
} }
current++; current++;
} }
return (RBP)createNewPileup(loc,filteredTracker); return (RBP) createNewPileup(loc, filteredTracker);
} } else {
else { UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>) pileupElementTracker;
UnifiedPileupElementTracker<PE> tracker = (UnifiedPileupElementTracker<PE>)pileupElementTracker;
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
Iterator positionIter = positions.iterator(); Iterator positionIter = positions.iterator();
while ( positionIter.hasNext() ) { while (positionIter.hasNext()) {
int nextReadToKeep = (Integer)positionIter.next(); int nextReadToKeep = (Integer) positionIter.next();
filteredTracker.add(tracker.get(nextReadToKeep)); filteredTracker.add(tracker.get(nextReadToKeep));
} }
return (RBP)createNewPileup(getLocation(), filteredTracker); return (RBP) createNewPileup(getLocation(), filteredTracker);
} }
} }
@Override @Override
public RBP getPileupForSamples(Collection<String> sampleNames) { public RBP getPileupForSamples(Collection<String> sampleNames) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PileupElementTracker<PE> filteredElements = tracker.getElements(sampleNames); PileupElementTracker<PE> filteredElements = tracker.getElements(sampleNames);
return filteredElements != null ? (RBP)createNewPileup(loc,filteredElements) : null; return filteredElements != null ? (RBP) createNewPileup(loc, filteredElements) : null;
} } else {
else {
HashSet<String> hashSampleNames = new HashSet<String>(sampleNames); // to speed up the "contains" access in the for loop HashSet<String> hashSampleNames = new HashSet<String>(sampleNames); // to speed up the "contains" access in the for loop
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for(PE p: pileupElementTracker) { for (PE p : pileupElementTracker) {
GATKSAMRecord read = p.getRead(); GATKSAMRecord read = p.getRead();
if(sampleNames != null) { // still checking on sampleNames because hashSampleNames will never be null. And empty means something else. if (sampleNames != null) { // still checking on sampleNames because hashSampleNames will never be null. And empty means something else.
if(read.getReadGroup() != null && hashSampleNames.contains(read.getReadGroup().getSample())) if (read.getReadGroup() != null && hashSampleNames.contains(read.getReadGroup().getSample()))
filteredTracker.add(p); filteredTracker.add(p);
} } else {
else { if (read.getReadGroup() == null || read.getReadGroup().getSample() == null)
if(read.getReadGroup() == null || read.getReadGroup().getSample() == null)
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} }
} }
@Override @Override
public RBP getPileupForSample(String sampleName) { public RBP getPileupForSample(String sampleName) {
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
PileupElementTracker<PE> filteredElements = tracker.getElements(sampleName); PileupElementTracker<PE> filteredElements = tracker.getElements(sampleName);
return filteredElements != null ? (RBP)createNewPileup(loc,filteredElements) : null; return filteredElements != null ? (RBP) createNewPileup(loc, filteredElements) : null;
} } else {
else {
UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>(); UnifiedPileupElementTracker<PE> filteredTracker = new UnifiedPileupElementTracker<PE>();
for(PE p: pileupElementTracker) { for (PE p : pileupElementTracker) {
GATKSAMRecord read = p.getRead(); GATKSAMRecord read = p.getRead();
if(sampleName != null) { if (sampleName != null) {
if(read.getReadGroup() != null && sampleName.equals(read.getReadGroup().getSample())) if (read.getReadGroup() != null && sampleName.equals(read.getReadGroup().getSample()))
filteredTracker.add(p); filteredTracker.add(p);
} } else {
else { if (read.getReadGroup() == null || read.getReadGroup().getSample() == null)
if(read.getReadGroup() == null || read.getReadGroup().getSample() == null)
filteredTracker.add(p); filteredTracker.add(p);
} }
} }
return filteredTracker.size()>0 ? (RBP)createNewPileup(loc,filteredTracker) : null; return filteredTracker.size() > 0 ? (RBP) createNewPileup(loc, filteredTracker) : null;
} }
} }
@ -727,9 +723,9 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
/** /**
* The best way to access PileupElements where you only care about the bases and quals in the pileup. * The best way to access PileupElements where you only care about the bases and quals in the pileup.
* * <p/>
* for (PileupElement p : this) { doSomething(p); } * for (PileupElement p : this) { doSomething(p); }
* * <p/>
* Provides efficient iteration of the data. * Provides efficient iteration of the data.
* *
* @return * @return
@ -739,9 +735,17 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
return new Iterator<PileupElement>() { return new Iterator<PileupElement>() {
private final Iterator<PE> wrappedIterator = pileupElementTracker.iterator(); private final Iterator<PE> wrappedIterator = pileupElementTracker.iterator();
public boolean hasNext() { return wrappedIterator.hasNext(); } public boolean hasNext() {
public PileupElement next() { return wrappedIterator.next(); } return wrappedIterator.hasNext();
public void remove() { throw new UnsupportedOperationException("Cannot remove from a pileup element iterator"); } }
public PileupElement next() {
return wrappedIterator.next();
}
public void remove() {
throw new UnsupportedOperationException("Cannot remove from a pileup element iterator");
}
}; };
} }
@ -784,7 +788,7 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
*/ */
@Override @Override
public int depthOfCoverage() { public int depthOfCoverage() {
if ( abstractSize == -1 ) if (abstractSize == -1)
calculateAbstractSize(); calculateAbstractSize();
return abstractSize; return abstractSize;
} }
@ -794,7 +798,7 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
*/ */
@Override @Override
public boolean isEmpty() { public boolean isEmpty() {
return size==0; return size == 0;
} }
@ -816,19 +820,18 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
public int[] getBaseCounts() { public int[] getBaseCounts() {
int[] counts = new int[4]; int[] counts = new int[4];
if(pileupElementTracker instanceof PerSamplePileupElementTracker) { if (pileupElementTracker instanceof PerSamplePileupElementTracker) {
PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>)pileupElementTracker; PerSamplePileupElementTracker<PE> tracker = (PerSamplePileupElementTracker<PE>) pileupElementTracker;
for(final String sample: tracker.getSamples()) { for (final String sample : tracker.getSamples()) {
int[] countsBySample = createNewPileup(loc,tracker.getElements(sample)).getBaseCounts(); int[] countsBySample = createNewPileup(loc, tracker.getElements(sample)).getBaseCounts();
for(int i = 0; i < counts.length; i++) for (int i = 0; i < counts.length; i++)
counts[i] += countsBySample[i]; counts[i] += countsBySample[i];
} }
} } else {
else { for (PileupElement pile : this) {
for ( PileupElement pile : this ) {
// skip deletion sites // skip deletion sites
if ( ! pile.isDeletion() ) { if (!pile.isDeletion()) {
int index = BaseUtils.simpleBaseToBaseIndex((char)pile.getBase()); int index = BaseUtils.simpleBaseToBaseIndex((char) pile.getBase());
if (index != -1) if (index != -1)
counts[index]++; counts[index]++;
} }
@ -857,65 +860,80 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
/** /**
* Returns a list of the reads in this pileup. Note this call costs O(n) and allocates fresh lists each time * Returns a list of the reads in this pileup. Note this call costs O(n) and allocates fresh lists each time
*
* @return * @return
*/ */
@Override @Override
public List<GATKSAMRecord> getReads() { public List<GATKSAMRecord> getReads() {
List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(getNumberOfElements()); List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(getNumberOfElements());
for ( PileupElement pile : this ) { reads.add(pile.getRead()); } for (PileupElement pile : this) {
reads.add(pile.getRead());
}
return reads; return reads;
} }
/** /**
* Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time * Returns a list of the offsets in this pileup. Note this call costs O(n) and allocates fresh lists each time
*
* @return * @return
*/ */
@Override @Override
public List<Integer> getOffsets() { public List<Integer> getOffsets() {
List<Integer> offsets = new ArrayList<Integer>(getNumberOfElements()); List<Integer> offsets = new ArrayList<Integer>(getNumberOfElements());
for ( PileupElement pile : this ) { offsets.add(pile.getOffset()); } for (PileupElement pile : this) {
offsets.add(pile.getOffset());
}
return offsets; return offsets;
} }
/** /**
* Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time * Returns an array of the bases in this pileup. Note this call costs O(n) and allocates fresh array each time
*
* @return * @return
*/ */
@Override @Override
public byte[] getBases() { public byte[] getBases() {
byte[] v = new byte[getNumberOfElements()]; byte[] v = new byte[getNumberOfElements()];
int pos = 0; int pos = 0;
for ( PileupElement pile : pileupElementTracker ) { v[pos++] = pile.getBase(); } for (PileupElement pile : pileupElementTracker) {
v[pos++] = pile.getBase();
}
return v; return v;
} }
/** /**
* Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time * Returns an array of the quals in this pileup. Note this call costs O(n) and allocates fresh array each time
*
* @return * @return
*/ */
@Override @Override
public byte[] getQuals() { public byte[] getQuals() {
byte[] v = new byte[getNumberOfElements()]; byte[] v = new byte[getNumberOfElements()];
int pos = 0; int pos = 0;
for ( PileupElement pile : pileupElementTracker ) { v[pos++] = pile.getQual(); } for (PileupElement pile : pileupElementTracker) {
v[pos++] = pile.getQual();
}
return v; return v;
} }
/** /**
* Get an array of the mapping qualities * Get an array of the mapping qualities
*
* @return * @return
*/ */
@Override @Override
public byte[] getMappingQuals() { public byte[] getMappingQuals() {
byte[] v = new byte[getNumberOfElements()]; byte[] v = new byte[getNumberOfElements()];
int pos = 0; int pos = 0;
for ( PileupElement pile : pileupElementTracker ) { v[pos++] = (byte)pile.getRead().getMappingQuality(); } for (PileupElement pile : pileupElementTracker) {
v[pos++] = (byte) pile.getRead().getMappingQuality();
}
return v; return v;
} }
static String quals2String( byte[] quals ) { static String quals2String(byte[] quals) {
StringBuilder qualStr = new StringBuilder(); StringBuilder qualStr = new StringBuilder();
for ( int qual : quals ) { for (int qual : quals) {
qual = Math.min(qual, 63); // todo: fixme, this isn't a good idea qual = Math.min(qual, 63); // todo: fixme, this isn't a good idea
char qualChar = (char) (33 + qual); // todo: warning, this is illegal for qual > 63 char qualChar = (char) (33 + qual); // todo: warning, this is illegal for qual > 63
qualStr.append(qualChar); qualStr.append(qualChar);

View File

@ -12,7 +12,7 @@ import java.util.Arrays;
* are seen on the base-by-base basis (i.e. the pileup does keep the information about the current reference base being deleted * are seen on the base-by-base basis (i.e. the pileup does keep the information about the current reference base being deleted
* in some reads), but the information about the extended event (deletion length, string of all deleted bases) is not kept. * in some reads), but the information about the extended event (deletion length, string of all deleted bases) is not kept.
* The insertions that may be present in some reads are not seen at all in such strict reference traversal mode. * The insertions that may be present in some reads are not seen at all in such strict reference traversal mode.
* * <p/>
* By convention, any extended event (indel) is mapped onto the reference at the last base prior to the event (i.e. * By convention, any extended event (indel) is mapped onto the reference at the last base prior to the event (i.e.
* last base before the insertion or deletion). If the special "extended" traversal mode is turned on and there is * last base before the insertion or deletion). If the special "extended" traversal mode is turned on and there is
* an indel in at least one read that maps onto the reference position Z, the walker's map function will be called twice: * an indel in at least one read that maps onto the reference position Z, the walker's map function will be called twice:
@ -22,9 +22,9 @@ import java.util.Arrays;
* (covered) reference position. Note that if the extended event at Z was a deletion, the "standard" base pileup at * (covered) reference position. Note that if the extended event at Z was a deletion, the "standard" base pileup at
* Z+1 and following bases may still contain deleted bases. However the fully extended event call will be performed * Z+1 and following bases may still contain deleted bases. However the fully extended event call will be performed
* only once, at the position where the indel maps (starts). * only once, at the position where the indel maps (starts).
* * <p/>
* This class wraps an "extended" event (indel) so that in can be added to a pileup of events at a given location. * This class wraps an "extended" event (indel) so that in can be added to a pileup of events at a given location.
* * <p/>
* Created by IntelliJ IDEA. * Created by IntelliJ IDEA.
* User: asivache * User: asivache
* Date: Dec 21, 2009 * Date: Dec 21, 2009
@ -39,40 +39,52 @@ public class ExtendedEventPileupElement extends PileupElement {
private Type type = null; private Type type = null;
private int eventLength = -1; private int eventLength = -1;
private String eventBases = null; // if it is a deletion, we do not have information about the actual deleted bases private String eventBases = null; // if it is a deletion, we do not have information about the actual deleted bases
// in the read itself, so we fill the string with D's; for insertions we keep actual inserted bases // in the read itself, so we fill the string with D's; for insertions we keep actual inserted bases
private SAMRecord read; private SAMRecord read;
private int offset; // position in the read immediately BEFORE the event private int offset; // position in the read immediately BEFORE the event
// This is broken! offset is always zero because these member variables are shadowed by base class // This is broken! offset is always zero because these member variables are shadowed by base class
/** Constructor for extended pileup element (indel).
* public ExtendedEventPileupElement(GATKSAMRecord read, int offset, int eventLength, String eventBases, Type type) {
* @param read the read, in which the indel is observed super(read, offset, type == Type.DELETION);
* @param offset position in the read immediately before the indel (can be -1 if read starts with an insertion) this.read = read;
* @param length length of the indel (number of inserted or deleted bases); length <=0 indicates that the read has no indel (NOEVENT) this.offset = offset;
* @param eventBases inserted bases. null indicates that the event is a deletion; ignored if length<=0 (noevent) this.eventLength = eventLength;
*/ this.eventBases = eventBases;
public ExtendedEventPileupElement( GATKSAMRecord read, int offset, int length, byte[] eventBases ) { this.type = type;
super(read, offset);
this.eventLength = length;
if ( length <= 0 ) type = Type.NOEVENT;
else {
if ( eventBases != null ) {
this.eventBases = new String(eventBases).toUpperCase();
type = Type.INSERTION;
} else {
type = Type.DELETION;
}
}
} }
/** Constructor for deletion or noevent calls - does not take event bases as an argument (as those should /**
* be null or are ignored in these cases anyway) * Quick constructor for insertions.
* @param read *
* @param offset * @param read the read, in which the indel is observed
* @param length * @param offset position in the read immediately before the indel (can be -1 if read starts with an insertion)
* @param length length of the indel (number of inserted or deleted bases); length <=0 indicates that the read has no indel (NOEVENT)
* @param eventBases inserted bases. null indicates that the event is a deletion; ignored if length<=0 (noevent)
*/ */
public ExtendedEventPileupElement( GATKSAMRecord read, int offset, int length ) { public ExtendedEventPileupElement(GATKSAMRecord read, int offset, int length, byte[] eventBases) {
this(read,offset, length, null); this(read, offset, length, new String(eventBases).toUpperCase(), Type.INSERTION);
}
/**
* Quick constructor for non indels (matches)
*
* @param read the read
* @param offset where in the read the match is
*/
public ExtendedEventPileupElement(GATKSAMRecord read, int offset) {
this(read, offset, -1, null, Type.NOEVENT);
}
/**
* Quick constructor for deletions
*
* @param read the read
* @param offset the last base before the deletion starts (left aligned deletion)
* @param length length of this deletion
*/
public ExtendedEventPileupElement(GATKSAMRecord read, int offset, int length) {
this(read, offset, length, null, Type.DELETION);
} }
public boolean isDeletion() { public boolean isDeletion() {
@ -87,46 +99,54 @@ public class ExtendedEventPileupElement extends PileupElement {
return isDeletion() || isInsertion(); return isDeletion() || isInsertion();
} }
public Type getType() { return type; } public Type getType() {
return type;
}
// The offset can be negative with insertions at the start of the read, but a valid base does exist at this position with // The offset can be negative with insertions at the start of the read, but a valid base does exist at this position with
// a valid base quality. The following code attempts to compensate for that.' // a valid base quality. The following code attempts to compensate for that.'
@Override @Override
public byte getBase() { public byte getBase() {
return getBase(offset >= 0 ? offset : offset+eventLength); return getBase(offset >= 0 ? offset : offset + eventLength);
} }
@Override @Override
public int getBaseIndex() { public int getBaseIndex() {
return getBaseIndex(offset >= 0 ? offset : offset+eventLength); return getBaseIndex(offset >= 0 ? offset : offset + eventLength);
} }
@Override @Override
public byte getQual() { public byte getQual() {
return getQual(offset >= 0 ? offset : offset+eventLength); return getQual(offset >= 0 ? offset : offset + eventLength);
} }
/** Returns length of the event (number of inserted or deleted bases */ /**
public int getEventLength() { return eventLength; } * Returns length of the event (number of inserted or deleted bases
*/
public int getEventLength() {
return eventLength;
}
/** Returns actual sequence of inserted bases, or a null if the event is a deletion or if there is no event in the associated read. /**
* */ * Returns actual sequence of inserted bases, or a null if the event is a deletion or if there is no event in the associated read.
public String getEventBases() { return eventBases; } */
public String getEventBases() {
return eventBases;
}
@Override @Override
public String toString() { public String toString() {
char c = '.'; char c = '.';
String fillStr = null ; String fillStr = null;
if ( isDeletion() ) { if (isDeletion()) {
c = '-'; c = '-';
char [] filler = new char[eventLength]; char[] filler = new char[eventLength];
Arrays.fill(filler, 'D'); Arrays.fill(filler, 'D');
fillStr = new String(filler); fillStr = new String(filler);
} } else if (isInsertion()) c = '+';
else if ( isInsertion() ) c = '+'; return String.format("%s @ %d = %c%s MQ%d", getRead().getReadName(), getOffset(), c, isIndel() ?
return String.format("%s @ %d = %c%s MQ%d", getRead().getReadName(), getOffset(), c, isIndel()? (isInsertion() ? eventBases : fillStr) : "", getMappingQual());
(isInsertion() ? eventBases : fillStr ): "", getMappingQual());
} }
} }

View File

@ -3,6 +3,7 @@ package org.broadinstitute.sting.utils.pileup;
import com.google.java.contract.Ensures; import com.google.java.contract.Ensures;
import com.google.java.contract.Requires; import com.google.java.contract.Requires;
import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord; import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
/** /**
@ -21,25 +22,61 @@ public class PileupElement implements Comparable<PileupElement> {
protected final GATKSAMRecord read; protected final GATKSAMRecord read;
protected final int offset; protected final int offset;
protected final boolean isDeletion;
/**
* Creates a new pileup element.
*
* @param read the read we are adding to the pileup
* @param offset the position in the read for this base. All deletions must be left aligned! (-1 is only allowed for reads starting with insertions)
* @param isDeletion whether or not this base is a deletion
*/
@Requires({ @Requires({
"read != null", "read != null",
"offset >= -1", "offset >= -1",
"offset <= read.getReadLength()"}) "offset <= read.getReadLength()"})
public PileupElement( GATKSAMRecord read, int offset ) { public PileupElement(GATKSAMRecord read, int offset, boolean isDeletion) {
if (offset < 0 && isDeletion)
throw new ReviewedStingException("Pileup Element cannot create a deletion with a negative offset");
this.read = read; this.read = read;
this.offset = offset; this.offset = offset;
this.isDeletion = isDeletion;
} }
// /**
// * Creates a NON DELETION pileup element.
// *
// * use this constructor only for insertions and matches/mismatches.
// * @param read the read we are adding to the pileup
// * @param offset the position in the read for this base. All deletions must be left aligned! (-1 is only allowed for reads starting with insertions)
// */
// @Requires({
// "read != null",
// "offset >= -1",
// "offset <= read.getReadLength()"})
// public PileupElement( GATKSAMRecord read, int offset ) {
// this(read, offset, false);
// }
//
public boolean isDeletion() { public boolean isDeletion() {
return isDeletion;
}
public boolean isInsertionAtBeginningOfRead() {
return offset == -1; return offset == -1;
} }
@Ensures("result != null") @Ensures("result != null")
public GATKSAMRecord getRead() { return read; } public GATKSAMRecord getRead() {
return read;
}
@Ensures("result == offset") @Ensures("result == offset")
public int getOffset() { return offset; } public int getOffset() {
return offset;
}
public byte getBase() { public byte getBase() {
return getBase(offset); return getBase(offset);
@ -59,30 +96,30 @@ public class PileupElement implements Comparable<PileupElement> {
@Ensures("result != null") @Ensures("result != null")
public String toString() { public String toString() {
return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), (char)getBase(), getQual()); return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), (char) getBase(), getQual());
} }
protected byte getBase(final int offset) { protected byte getBase(final int offset) {
return isDeletion() ? DELETION_BASE : read.getReadBases()[offset]; return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_BASE : read.getReadBases()[offset];
} }
protected int getBaseIndex(final int offset) { protected int getBaseIndex(final int offset) {
return BaseUtils.simpleBaseToBaseIndex(isDeletion() ? DELETION_BASE : read.getReadBases()[offset]); return BaseUtils.simpleBaseToBaseIndex((isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_BASE : read.getReadBases()[offset]);
} }
protected byte getQual(final int offset) { protected byte getQual(final int offset) {
return isDeletion() ? DELETION_QUAL : read.getBaseQualities()[offset]; return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_QUAL : read.getBaseQualities()[offset];
} }
@Override @Override
public int compareTo(final PileupElement pileupElement) { public int compareTo(final PileupElement pileupElement) {
if ( offset < pileupElement.offset ) if (offset < pileupElement.offset)
return -1; return -1;
else if ( offset > pileupElement.offset ) else if (offset > pileupElement.offset)
return 1; return 1;
else if ( read.getAlignmentStart() < pileupElement.read.getAlignmentStart() ) else if (read.getAlignmentStart() < pileupElement.read.getAlignmentStart())
return -1; return -1;
else if ( read.getAlignmentStart() > pileupElement.read.getAlignmentStart() ) else if (read.getAlignmentStart() > pileupElement.read.getAlignmentStart())
return 1; return 1;
else else
return 0; return 0;
@ -94,13 +131,26 @@ public class PileupElement implements Comparable<PileupElement> {
// //
// -------------------------------------------------------------------------- // --------------------------------------------------------------------------
public boolean isReducedRead() { // public boolean isReducedRead() {
return read.isReducedRead(); // return read.isReducedRead();
} // }
/**
* Returns the number of elements in the pileup element.
* <p/>
* Unless this is a reduced read, the number of elements in a pileup element is one. In the event of
* this being a reduced read and a deletion, we return the average number of elements between the left
* and right elements to the deletion. We assume the deletion to be left aligned.
*
* @return
*/
public int getRepresentativeCount() { public int getRepresentativeCount() {
// TODO -- if we ever decide to reduce the representation of deletions then this will need to be fixed int representativeCount = 1;
return (!isDeletion() && isReducedRead()) ? read.getReducedCount(offset) : 1;
if (read.isReducedRead() && !isInsertionAtBeginningOfRead())
representativeCount = (isDeletion()) ? Math.round((read.getReducedCount(offset) + read.getReducedCount(offset + 1)) / 2) : read.getReducedCount(offset);
return representativeCount;
} }
} }

View File

@ -30,33 +30,34 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.*; import java.util.*;
public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<ReadBackedExtendedEventPileupImpl,ExtendedEventPileupElement> implements ReadBackedExtendedEventPileup { public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<ReadBackedExtendedEventPileupImpl, ExtendedEventPileupElement> implements ReadBackedExtendedEventPileup {
private int nInsertions; private int nInsertions;
private int maxDeletionLength; // cached value of the length of the longest deletion observed at the site private int maxDeletionLength; // cached value of the length of the longest deletion observed at the site
public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, List<ExtendedEventPileupElement> pileupElements) { public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, List<ExtendedEventPileupElement> pileupElements) {
super(loc,pileupElements); super(loc, pileupElements);
} }
public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, PileupElementTracker<ExtendedEventPileupElement> tracker) { public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, PileupElementTracker<ExtendedEventPileupElement> tracker) {
super(loc,tracker); super(loc, tracker);
} }
/** /**
* Optimization of above constructor where all of the cached data is provided * Optimization of above constructor where all of the cached data is provided
*
* @param loc * @param loc
* @param pileup * @param pileup
*/ */
public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, List<ExtendedEventPileupElement> pileup, int size, public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, List<ExtendedEventPileupElement> pileup, int size,
int maxDeletionLength, int nInsertions, int nDeletions, int nMQ0Reads) { int maxDeletionLength, int nInsertions, int nDeletions, int nMQ0Reads) {
super(loc,pileup,size,nDeletions,nMQ0Reads); super(loc, pileup, size, nDeletions, nMQ0Reads);
this.maxDeletionLength = maxDeletionLength; this.maxDeletionLength = maxDeletionLength;
this.nInsertions = nInsertions; this.nInsertions = nInsertions;
} }
// this is the good new one // this is the good new one
public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, Map<String,? extends ReadBackedExtendedEventPileupImpl> pileupElementsBySample) { public ReadBackedExtendedEventPileupImpl(GenomeLoc loc, Map<String, ? extends ReadBackedExtendedEventPileupImpl> pileupElementsBySample) {
super(loc,pileupElementsBySample); super(loc, pileupElementsBySample);
} }
/** /**
@ -71,31 +72,31 @@ public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<
nInsertions = 0; nInsertions = 0;
nMQ0Reads = 0; nMQ0Reads = 0;
for ( ExtendedEventPileupElement p : this.toExtendedIterable() ) { for (ExtendedEventPileupElement p : this.toExtendedIterable()) {
if ( p.isDeletion() ) { if (p.isDeletion()) {
maxDeletionLength = Math.max(maxDeletionLength, p.getEventLength()); maxDeletionLength = Math.max(maxDeletionLength, p.getEventLength());
} else { } else {
if ( p.isInsertion() ) nInsertions++; if (p.isInsertion()) nInsertions++;
} }
} }
} }
@Override @Override
protected void addPileupToCumulativeStats(AbstractReadBackedPileup<ReadBackedExtendedEventPileupImpl,ExtendedEventPileupElement> pileup) { protected void addPileupToCumulativeStats(AbstractReadBackedPileup<ReadBackedExtendedEventPileupImpl, ExtendedEventPileupElement> pileup) {
super.addPileupToCumulativeStats(pileup); super.addPileupToCumulativeStats(pileup);
ReadBackedExtendedEventPileup extendedEventPileup = ((ReadBackedExtendedEventPileup)pileup); ReadBackedExtendedEventPileup extendedEventPileup = ((ReadBackedExtendedEventPileup) pileup);
this.nInsertions += extendedEventPileup.getNumberOfInsertions(); this.nInsertions += extendedEventPileup.getNumberOfInsertions();
this.maxDeletionLength += extendedEventPileup.getMaxDeletionLength(); this.maxDeletionLength += extendedEventPileup.getMaxDeletionLength();
} }
@Override @Override
protected ReadBackedExtendedEventPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker<ExtendedEventPileupElement> tracker) { protected ReadBackedExtendedEventPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker<ExtendedEventPileupElement> tracker) {
return new ReadBackedExtendedEventPileupImpl(loc,tracker); return new ReadBackedExtendedEventPileupImpl(loc, tracker);
} }
@Override @Override
protected ExtendedEventPileupElement createNewPileupElement(GATKSAMRecord read, int offset) { protected ExtendedEventPileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion) {
throw new UnsupportedOperationException("Not enough information provided to create a new pileup element"); throw new UnsupportedOperationException("Not enough information provided to create a new pileup element");
} }
@ -110,10 +111,12 @@ public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<
return nInsertions; return nInsertions;
} }
/** Returns the length of the longest deletion observed at the site this /**
* Returns the length of the longest deletion observed at the site this
* pileup is associated with (NOTE: by convention, both insertions and deletions * pileup is associated with (NOTE: by convention, both insertions and deletions
* are associated with genomic location immediately before the actual event). If * are associated with genomic location immediately before the actual event). If
* there are no deletions at the site, returns 0. * there are no deletions at the site, returns 0.
*
* @return * @return
*/ */
@Override @Override
@ -123,36 +126,47 @@ public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<
public Iterable<ExtendedEventPileupElement> toExtendedIterable() { public Iterable<ExtendedEventPileupElement> toExtendedIterable() {
return new Iterable<ExtendedEventPileupElement>() { return new Iterable<ExtendedEventPileupElement>() {
public Iterator<ExtendedEventPileupElement> iterator() { return pileupElementTracker.iterator(); } public Iterator<ExtendedEventPileupElement> iterator() {
return pileupElementTracker.iterator();
}
}; };
} }
/** /**
* Returns an array of the events in this pileup ('I', 'D', or '.'). Note this call costs O(n) and allocates fresh array each time * Returns an array of the events in this pileup ('I', 'D', or '.'). Note this call costs O(n) and allocates fresh array each time
*
* @return * @return
*/ */
@Override @Override
public byte[] getEvents() { public byte[] getEvents() {
byte[] v = new byte[getNumberOfElements()]; byte[] v = new byte[getNumberOfElements()];
int i = 0; int i = 0;
for ( ExtendedEventPileupElement e : this.toExtendedIterable() ) { for (ExtendedEventPileupElement e : this.toExtendedIterable()) {
switch ( e.getType() ) { switch (e.getType()) {
case INSERTION: v[i] = 'I'; break; case INSERTION:
case DELETION: v[i] = 'D'; break; v[i] = 'I';
case NOEVENT: v[i] = '.'; break; break;
default: throw new ReviewedStingException("Unknown event type encountered: "+e.getType()); case DELETION:
v[i] = 'D';
break;
case NOEVENT:
v[i] = '.';
break;
default:
throw new ReviewedStingException("Unknown event type encountered: " + e.getType());
} }
i++; i++;
} }
return v; return v;
} }
/** A shortcut for getEventStringsWithCounts(null); /**
* A shortcut for getEventStringsWithCounts(null);
* *
* @return * @return
*/ */
@Override @Override
public List<Pair<String,Integer>> getEventStringsWithCounts() { public List<Pair<String, Integer>> getEventStringsWithCounts() {
return getEventStringsWithCounts(null); return getEventStringsWithCounts(null);
} }
@ -163,44 +177,48 @@ public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<
// insertion, deletion or no-event, respectively. // insertion, deletion or no-event, respectively.
return String.format("%s %s E %s", return String.format("%s %s E %s",
getLocation().getContig(), getLocation().getStart(), // chromosome name and coordinate getLocation().getContig(), getLocation().getStart(), // chromosome name and coordinate
new String(getEvents()) ); new String(getEvents()));
} }
/** Returns String representation of all distinct extended events (indels) at the site along with /**
* Returns String representation of all distinct extended events (indels) at the site along with
* observation counts (numbers of reads) for each distinct event. If refBases is null, a simple string representation for * observation counts (numbers of reads) for each distinct event. If refBases is null, a simple string representation for
* deletions will be generated as "<length>D" (i.e. "5D"); if the reference bases are provided, the actual * deletions will be generated as "<length>D" (i.e. "5D"); if the reference bases are provided, the actual
* deleted sequence will be used in the string representation (e.g. "-AAC"). * deleted sequence will be used in the string representation (e.g. "-AAC").
* @param refBases reference bases, starting with the current locus (i.e. the one immediately before the indel), and *
* extending far enough to accomodate the longest deletion (i.e. size of refBases must be at least 1+<length of longest deletion>) * @param refBases reference bases, starting with the current locus (i.e. the one immediately before the indel), and
* extending far enough to accomodate the longest deletion (i.e. size of refBases must be at least 1+<length of longest deletion>)
* @return list of distinct events; first element of a pair is a string representation of the event, second element * @return list of distinct events; first element of a pair is a string representation of the event, second element
* gives the number of reads, in which that event was observed * gives the number of reads, in which that event was observed
*/ */
@Override @Override
public List<Pair<String,Integer>> getEventStringsWithCounts(byte[] refBases) { public List<Pair<String, Integer>> getEventStringsWithCounts(byte[] refBases) {
Map<String, Integer> events = new HashMap<String,Integer>(); Map<String, Integer> events = new HashMap<String, Integer>();
for ( ExtendedEventPileupElement e : this.toExtendedIterable() ) { for (ExtendedEventPileupElement e : this.toExtendedIterable()) {
Integer cnt; Integer cnt;
String indel = null; String indel = null;
switch ( e.getType() ) { switch (e.getType()) {
case INSERTION: case INSERTION:
indel = "+"+e.getEventBases(); indel = "+" + e.getEventBases();
break; break;
case DELETION: case DELETION:
indel = getDeletionString(e.getEventLength(),refBases); indel = getDeletionString(e.getEventLength(), refBases);
break; break;
case NOEVENT: continue; case NOEVENT:
default: throw new ReviewedStingException("Unknown event type encountered: "+e.getType()); continue;
default:
throw new ReviewedStingException("Unknown event type encountered: " + e.getType());
} }
cnt = events.get(indel); cnt = events.get(indel);
if ( cnt == null ) events.put(indel,1); if (cnt == null) events.put(indel, 1);
else events.put(indel,cnt.intValue()+1); else events.put(indel, cnt.intValue() + 1);
} }
List<Pair<String,Integer>> eventList = new ArrayList<Pair<String,Integer>>(events.size()); List<Pair<String, Integer>> eventList = new ArrayList<Pair<String, Integer>>(events.size());
for ( Map.Entry<String,Integer> m : events.entrySet() ) { for (Map.Entry<String, Integer> m : events.entrySet()) {
eventList.add( new Pair<String,Integer>(m.getKey(),m.getValue())); eventList.add(new Pair<String, Integer>(m.getKey(), m.getValue()));
} }
return eventList; return eventList;
} }
@ -208,18 +226,19 @@ public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<
/** /**
* Builds string representation of the deletion event. If refBases is null, the representation will be * Builds string representation of the deletion event. If refBases is null, the representation will be
* "<length>D" (e.g. "5D"); if the reference bases are provided, a verbose representation (e.g. "-AAC") * "<length>D" (e.g. "5D"); if the reference bases are provided, a verbose representation (e.g. "-AAC")
* will be generated. NOTE: refBases must start with the base prior to the actual deletion (i.e. deleted * will be generated. NOTE: refBases must start with the base prior to the actual deletion (i.e. deleted
* base(s) are refBase[1], refBase[2], ...), and the length of the passed array must be sufficient to accomodate the * base(s) are refBase[1], refBase[2], ...), and the length of the passed array must be sufficient to accomodate the
* deletion length (i.e. size of refBase must be at least length+1). * deletion length (i.e. size of refBase must be at least length+1).
*
* @param length * @param length
* @param refBases * @param refBases
* @return * @return
*/ */
private String getDeletionString(int length, byte[] refBases) { private String getDeletionString(int length, byte[] refBases) {
if ( refBases == null ) { if (refBases == null) {
return Integer.toString(length)+"D"; // if we do not have reference bases, we can only report something like "5D" return Integer.toString(length) + "D"; // if we do not have reference bases, we can only report something like "5D"
} else { } else {
return "-"+new String(refBases,1,length).toUpperCase(); return "-" + new String(refBases, 1, length).toUpperCase();
} }
} }
} }

View File

@ -29,48 +29,49 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
public class ReadBackedPileupImpl extends AbstractReadBackedPileup<ReadBackedPileupImpl,PileupElement> implements ReadBackedPileup { public class ReadBackedPileupImpl extends AbstractReadBackedPileup<ReadBackedPileupImpl, PileupElement> implements ReadBackedPileup {
public ReadBackedPileupImpl(GenomeLoc loc) { public ReadBackedPileupImpl(GenomeLoc loc) {
super(loc); super(loc);
} }
public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, List<Integer> offsets ) { public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, List<Integer> offsets) {
super(loc,reads,offsets); super(loc, reads, offsets);
} }
public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, int offset ) { public ReadBackedPileupImpl(GenomeLoc loc, List<GATKSAMRecord> reads, int offset) {
super(loc,reads,offset); super(loc, reads, offset);
} }
public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileupElements) { public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileupElements) {
super(loc,pileupElements); super(loc, pileupElements);
} }
public ReadBackedPileupImpl(GenomeLoc loc, Map<String,ReadBackedPileupImpl> pileupElementsBySample) { public ReadBackedPileupImpl(GenomeLoc loc, Map<String, ReadBackedPileupImpl> pileupElementsBySample) {
super(loc,pileupElementsBySample); super(loc, pileupElementsBySample);
} }
/** /**
* Optimization of above constructor where all of the cached data is provided * Optimization of above constructor where all of the cached data is provided
*
* @param loc * @param loc
* @param pileup * @param pileup
*/ */
public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileup, int size, int nDeletions, int nMQ0Reads) { public ReadBackedPileupImpl(GenomeLoc loc, List<PileupElement> pileup, int size, int nDeletions, int nMQ0Reads) {
super(loc,pileup,size,nDeletions,nMQ0Reads); super(loc, pileup, size, nDeletions, nMQ0Reads);
} }
protected ReadBackedPileupImpl(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) { protected ReadBackedPileupImpl(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) {
super(loc,tracker); super(loc, tracker);
} }
@Override @Override
protected ReadBackedPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) { protected ReadBackedPileupImpl createNewPileup(GenomeLoc loc, PileupElementTracker<PileupElement> tracker) {
return new ReadBackedPileupImpl(loc,tracker); return new ReadBackedPileupImpl(loc, tracker);
} }
@Override @Override
protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset) { protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion) {
return new PileupElement(read,offset); return new PileupElement(read, offset, isDeletion);
} }
} }

View File

@ -27,7 +27,7 @@ public class ArtificialSAMUtils {
* @param chromosomeSize how large each chromosome is * @param chromosomeSize how large each chromosome is
* @param readsPerChomosome how many reads to make in each chromosome. They'll be aligned from position 1 to x (which is the number of reads) * @param readsPerChomosome how many reads to make in each chromosome. They'll be aligned from position 1 to x (which is the number of reads)
*/ */
public static void createArtificialBamFile( String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome ) { public static void createArtificialBamFile(String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome) {
SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize); SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize);
File outFile = new File(filename); File outFile = new File(filename);
@ -51,7 +51,7 @@ public class ArtificialSAMUtils {
* @param chromosomeSize how large each chromosome is * @param chromosomeSize how large each chromosome is
* @param readsPerChomosome how many reads to make in each chromosome. They'll be aligned from position 1 to x (which is the number of reads) * @param readsPerChomosome how many reads to make in each chromosome. They'll be aligned from position 1 to x (which is the number of reads)
*/ */
public static void createArtificialSamFile( String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome ) { public static void createArtificialSamFile(String filename, int numberOfChromosomes, int startingChromosome, int chromosomeSize, int readsPerChomosome) {
SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize); SAMFileHeader header = createArtificialSamHeader(numberOfChromosomes, startingChromosome, chromosomeSize);
File outFile = new File(filename); File outFile = new File(filename);
@ -72,16 +72,15 @@ public class ArtificialSAMUtils {
* @param numberOfChromosomes the number of chromosomes to create * @param numberOfChromosomes the number of chromosomes to create
* @param startingChromosome the starting number for the chromosome (most likely set to 1) * @param startingChromosome the starting number for the chromosome (most likely set to 1)
* @param chromosomeSize the length of each chromosome * @param chromosomeSize the length of each chromosome
*
* @return * @return
*/ */
public static SAMFileHeader createArtificialSamHeader( int numberOfChromosomes, int startingChromosome, int chromosomeSize ) { public static SAMFileHeader createArtificialSamHeader(int numberOfChromosomes, int startingChromosome, int chromosomeSize) {
SAMFileHeader header = new SAMFileHeader(); SAMFileHeader header = new SAMFileHeader();
header.setSortOrder(net.sf.samtools.SAMFileHeader.SortOrder.coordinate); header.setSortOrder(net.sf.samtools.SAMFileHeader.SortOrder.coordinate);
SAMSequenceDictionary dict = new SAMSequenceDictionary(); SAMSequenceDictionary dict = new SAMSequenceDictionary();
// make up some sequence records // make up some sequence records
for (int x = startingChromosome; x < startingChromosome + numberOfChromosomes; x++) { for (int x = startingChromosome; x < startingChromosome + numberOfChromosomes; x++) {
SAMSequenceRecord rec = new SAMSequenceRecord("chr" + ( x ), chromosomeSize /* size */); SAMSequenceRecord rec = new SAMSequenceRecord("chr" + (x), chromosomeSize /* size */);
rec.setSequenceLength(chromosomeSize); rec.setSequenceLength(chromosomeSize);
dict.addSequence(rec); dict.addSequence(rec);
} }
@ -95,10 +94,9 @@ public class ArtificialSAMUtils {
* @param header the header to set * @param header the header to set
* @param readGroupID the read group ID tag * @param readGroupID the read group ID tag
* @param sampleName the sample name * @param sampleName the sample name
*
* @return the adjusted SAMFileHeader * @return the adjusted SAMFileHeader
*/ */
public static SAMFileHeader createDefaultReadGroup( SAMFileHeader header, String readGroupID, String sampleName ) { public static SAMFileHeader createDefaultReadGroup(SAMFileHeader header, String readGroupID, String sampleName) {
SAMReadGroupRecord rec = new SAMReadGroupRecord(readGroupID); SAMReadGroupRecord rec = new SAMReadGroupRecord(readGroupID);
rec.setSample(sampleName); rec.setSample(sampleName);
List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>(); List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>();
@ -113,10 +111,9 @@ public class ArtificialSAMUtils {
* @param header the header to set * @param header the header to set
* @param readGroupIDs the read group ID tags * @param readGroupIDs the read group ID tags
* @param sampleNames the sample names * @param sampleNames the sample names
*
* @return the adjusted SAMFileHeader * @return the adjusted SAMFileHeader
*/ */
public static SAMFileHeader createEnumeratedReadGroups( SAMFileHeader header, List<String> readGroupIDs, List<String> sampleNames ) { public static SAMFileHeader createEnumeratedReadGroups(SAMFileHeader header, List<String> readGroupIDs, List<String> sampleNames) {
if (readGroupIDs.size() != sampleNames.size()) { if (readGroupIDs.size() != sampleNames.size()) {
throw new ReviewedStingException("read group count and sample name count must be the same"); throw new ReviewedStingException("read group count and sample name count must be the same");
} }
@ -137,18 +134,16 @@ public class ArtificialSAMUtils {
/** /**
* Create an artificial read based on the parameters. The cigar string will be *M, where * is the length of the read * Create an artificial read based on the parameters. The cigar string will be *M, where * is the length of the read
* *
*
* @param header the SAM header to associate the read with * @param header the SAM header to associate the read with
* @param name the name of the read * @param name the name of the read
* @param refIndex the reference index, i.e. what chromosome to associate it with * @param refIndex the reference index, i.e. what chromosome to associate it with
* @param alignmentStart where to start the alignment * @param alignmentStart where to start the alignment
* @param length the length of the read * @param length the length of the read
*
* @return the artificial read * @return the artificial read
*/ */
public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, int length) { public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, int length) {
if( (refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart != SAMRecord.NO_ALIGNMENT_START) || if ((refIndex == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart != SAMRecord.NO_ALIGNMENT_START) ||
(refIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart == SAMRecord.NO_ALIGNMENT_START) ) (refIndex != SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX && alignmentStart == SAMRecord.NO_ALIGNMENT_START))
throw new ReviewedStingException("Invalid alignment start for artificial read, start = " + alignmentStart); throw new ReviewedStingException("Invalid alignment start for artificial read, start = " + alignmentStart);
GATKSAMRecord record = new GATKSAMRecord(header); GATKSAMRecord record = new GATKSAMRecord(header);
record.setReadName(name); record.setReadName(name);
@ -183,10 +178,9 @@ public class ArtificialSAMUtils {
* @param alignmentStart where to start the alignment * @param alignmentStart where to start the alignment
* @param bases the sequence of the read * @param bases the sequence of the read
* @param qual the qualities of the read * @param qual the qualities of the read
*
* @return the artificial read * @return the artificial read
*/ */
public static GATKSAMRecord createArtificialRead( SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual ) { public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual) {
if (bases.length != qual.length) { if (bases.length != qual.length) {
throw new ReviewedStingException("Passed in read string is different length then the quality array"); throw new ReviewedStingException("Passed in read string is different length then the quality array");
} }
@ -210,10 +204,9 @@ public class ArtificialSAMUtils {
* @param bases the sequence of the read * @param bases the sequence of the read
* @param qual the qualities of the read * @param qual the qualities of the read
* @param cigar the cigar string of the read * @param cigar the cigar string of the read
*
* @return the artificial read * @return the artificial read
*/ */
public static GATKSAMRecord createArtificialRead( SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual, String cigar ) { public static GATKSAMRecord createArtificialRead(SAMFileHeader header, String name, int refIndex, int alignmentStart, byte[] bases, byte[] qual, String cigar) {
GATKSAMRecord rec = createArtificialRead(header, name, refIndex, alignmentStart, bases, qual); GATKSAMRecord rec = createArtificialRead(header, name, refIndex, alignmentStart, bases, qual);
rec.setCigarString(cigar); rec.setCigarString(cigar);
return rec; return rec;
@ -221,22 +214,21 @@ public class ArtificialSAMUtils {
/** /**
* Create an artificial read with the following default parameters : * Create an artificial read with the following default parameters :
* header: * header:
* numberOfChromosomes = 1 * numberOfChromosomes = 1
* startingChromosome = 1 * startingChromosome = 1
* chromosomeSize = 1000000 * chromosomeSize = 1000000
* read: * read:
* name = "default_read" * name = "default_read"
* refIndex = 0 * refIndex = 0
* alignmentStart = 1 * alignmentStart = 1
*
* @param bases the sequence of the read
* @param qual the qualities of the read
* @param cigar the cigar string of the read
* *
* @param bases the sequence of the read
* @param qual the qualities of the read
* @param cigar the cigar string of the read
* @return the artificial read * @return the artificial read
*/ */
public static GATKSAMRecord createArtificialRead( byte[] bases, byte[] qual, String cigar ) { public static GATKSAMRecord createArtificialRead(byte[] bases, byte[] qual, String cigar) {
SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000); SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000);
return ArtificialSAMUtils.createArtificialRead(header, "default_read", 0, 10000, bases, qual, cigar); return ArtificialSAMUtils.createArtificialRead(header, "default_read", 0, 10000, bases, qual, cigar);
} }
@ -253,7 +245,7 @@ public class ArtificialSAMUtils {
right.setProperPairFlag(true); right.setProperPairFlag(true);
left.setFirstOfPairFlag(leftIsFirst); left.setFirstOfPairFlag(leftIsFirst);
right.setFirstOfPairFlag(! leftIsFirst); right.setFirstOfPairFlag(!leftIsFirst);
left.setReadNegativeStrandFlag(leftIsNegative); left.setReadNegativeStrandFlag(leftIsNegative);
left.setMateNegativeStrandFlag(!leftIsNegative); left.setMateNegativeStrandFlag(!leftIsNegative);
@ -279,11 +271,10 @@ public class ArtificialSAMUtils {
* @param startingChr the chromosome (reference ID) to start from * @param startingChr the chromosome (reference ID) to start from
* @param endingChr the id to end with * @param endingChr the id to end with
* @param readCount the number of reads per chromosome * @param readCount the number of reads per chromosome
*
* @return StingSAMIterator representing the specified amount of fake data * @return StingSAMIterator representing the specified amount of fake data
*/ */
public static StingSAMIterator mappedReadIterator( int startingChr, int endingChr, int readCount ) { public static StingSAMIterator mappedReadIterator(int startingChr, int endingChr, int readCount) {
SAMFileHeader header = createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readCount + DEFAULT_READ_LENGTH); SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header); return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header);
} }
@ -295,11 +286,10 @@ public class ArtificialSAMUtils {
* @param endingChr the id to end with * @param endingChr the id to end with
* @param readCount the number of reads per chromosome * @param readCount the number of reads per chromosome
* @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file * @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file
*
* @return StingSAMIterator representing the specified amount of fake data * @return StingSAMIterator representing the specified amount of fake data
*/ */
public static StingSAMIterator mappedAndUnmappedReadIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount ) { public static StingSAMIterator mappedAndUnmappedReadIterator(int startingChr, int endingChr, int readCount, int unmappedReadCount) {
SAMFileHeader header = createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readCount + DEFAULT_READ_LENGTH); SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header); return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header);
} }
@ -310,11 +300,10 @@ public class ArtificialSAMUtils {
* @param startingChr the chromosome (reference ID) to start from * @param startingChr the chromosome (reference ID) to start from
* @param endingChr the id to end with * @param endingChr the id to end with
* @param readCount the number of reads per chromosome * @param readCount the number of reads per chromosome
*
* @return StingSAMIterator representing the specified amount of fake data * @return StingSAMIterator representing the specified amount of fake data
*/ */
public static ArtificialSAMQueryIterator queryReadIterator( int startingChr, int endingChr, int readCount ) { public static ArtificialSAMQueryIterator queryReadIterator(int startingChr, int endingChr, int readCount) {
SAMFileHeader header = createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readCount + DEFAULT_READ_LENGTH); SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header); return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, 0, header);
} }
@ -326,11 +315,10 @@ public class ArtificialSAMUtils {
* @param endingChr the id to end with * @param endingChr the id to end with
* @param readCount the number of reads per chromosome * @param readCount the number of reads per chromosome
* @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file * @param unmappedReadCount the count of unmapped reads to place at the end of the iterator, like in a sorted bam file
*
* @return StingSAMIterator representing the specified amount of fake data * @return StingSAMIterator representing the specified amount of fake data
*/ */
public static StingSAMIterator queryReadIterator( int startingChr, int endingChr, int readCount, int unmappedReadCount ) { public static StingSAMIterator queryReadIterator(int startingChr, int endingChr, int readCount, int unmappedReadCount) {
SAMFileHeader header = createArtificialSamHeader(( endingChr - startingChr ) + 1, startingChr, readCount + DEFAULT_READ_LENGTH); SAMFileHeader header = createArtificialSamHeader((endingChr - startingChr) + 1, startingChr, readCount + DEFAULT_READ_LENGTH);
return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header); return new ArtificialSAMQueryIterator(startingChr, endingChr, readCount, unmappedReadCount, header);
} }
@ -345,6 +333,7 @@ public class ArtificialSAMUtils {
* reads created that have readLen bases. Pairs are sampled from a gaussian distribution with mean insert * reads created that have readLen bases. Pairs are sampled from a gaussian distribution with mean insert
* size of insertSize and variation of insertSize / 10. The first read will be in the pileup, and the second * size of insertSize and variation of insertSize / 10. The first read will be in the pileup, and the second
* may be, depending on where this sampled insertSize puts it. * may be, depending on where this sampled insertSize puts it.
*
* @param header * @param header
* @param loc * @param loc
* @param readLen * @param readLen
@ -360,22 +349,22 @@ public class ArtificialSAMUtils {
final int pos = loc.getStart(); final int pos = loc.getStart();
final List<PileupElement> pileupElements = new ArrayList<PileupElement>(); final List<PileupElement> pileupElements = new ArrayList<PileupElement>();
for ( int i = 0; i < pileupSize / 2; i++ ) { for (int i = 0; i < pileupSize / 2; i++) {
final String readName = "read" + i; final String readName = "read" + i;
final int leftStart = ranIntInclusive(ran, 1, pos); final int leftStart = ranIntInclusive(ran, 1, pos);
final int fragmentSize = (int)(ran.nextGaussian() * insertSizeVariation + insertSize); final int fragmentSize = (int) (ran.nextGaussian() * insertSizeVariation + insertSize);
final int rightStart = leftStart + fragmentSize - readLen; final int rightStart = leftStart + fragmentSize - readLen;
if ( rightStart <= 0 ) continue; if (rightStart <= 0) continue;
List<GATKSAMRecord> pair = createPair(header, readName, readLen, leftStart, rightStart, leftIsFirst, leftIsNegative); List<GATKSAMRecord> pair = createPair(header, readName, readLen, leftStart, rightStart, leftIsFirst, leftIsNegative);
final GATKSAMRecord left = pair.get(0); final GATKSAMRecord left = pair.get(0);
final GATKSAMRecord right = pair.get(1); final GATKSAMRecord right = pair.get(1);
pileupElements.add(new PileupElement(left, pos - leftStart)); pileupElements.add(new PileupElement(left, pos - leftStart, false));
if ( pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd() ) { if (pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd()) {
pileupElements.add(new PileupElement(right, pos - rightStart)); pileupElements.add(new PileupElement(right, pos - rightStart, false));
} }
} }

View File

@ -1,13 +1,20 @@
package org.broadinstitute.sting; package org.broadinstitute.sting;
import org.apache.log4j.*; import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.spi.LoggingEvent; import org.apache.log4j.spi.LoggingEvent;
import org.broadinstitute.sting.commandline.CommandLineUtils; import org.broadinstitute.sting.commandline.CommandLineUtils;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.io.IOUtils; import org.broadinstitute.sting.utils.io.IOUtils;
import java.io.*; import java.io.File;
import java.util.*; import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** /**
* *

View File

@ -28,7 +28,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
public void testMultiSamplePilot1() { public void testMultiSamplePilot1() {
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
baseCommand + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10,022,000-10,025,000", 1, baseCommand + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10,022,000-10,025,000", 1,
Arrays.asList("d61c7055bd09024abb8902bde6bd3960")); Arrays.asList("653172b43b19003d9f7df6dab21f4b09"));
executeTest("test MultiSample Pilot1", spec); executeTest("test MultiSample Pilot1", spec);
} }
@ -227,7 +227,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
" -o %s" + " -o %s" +
" -L 1:10,000,000-10,500,000", " -L 1:10,000,000-10,500,000",
1, 1,
Arrays.asList("b11df6587e4e16cb819d76a900446946")); Arrays.asList("bd9d3d50a1f49605d7cd592a0f446899"));
executeTest(String.format("test indel caller in SLX"), spec); executeTest(String.format("test indel caller in SLX"), spec);
} }
@ -255,7 +255,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
" -o %s" + " -o %s" +
" -L 1:10,000,000-10,500,000", " -L 1:10,000,000-10,500,000",
1, 1,
Arrays.asList("59068bc8888ad5f08790946066d76602")); Arrays.asList("91cd6d2e3972b0b8e4064bb35a33241f"));
executeTest(String.format("test indel calling, multiple technologies"), spec); executeTest(String.format("test indel calling, multiple technologies"), spec);
} }
@ -294,7 +294,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
WalkerTest.WalkerTestSpec spec4 = new WalkerTest.WalkerTestSpec( WalkerTest.WalkerTestSpec spec4 = new WalkerTest.WalkerTestSpec(
baseCommandIndelsb37 + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + validationDataLocation + "ALL.wgs.union_v2_chr20_100_110K.20101123.indels.sites.vcf -I " + validationDataLocation + baseCommandIndelsb37 + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + validationDataLocation + "ALL.wgs.union_v2_chr20_100_110K.20101123.indels.sites.vcf -I " + validationDataLocation +
"phase1_GBR_realigned.chr20.100K-110K.bam -o %s -L 20:100,000-110,000", 1, "phase1_GBR_realigned.chr20.100K-110K.bam -o %s -L 20:100,000-110,000", 1,
Arrays.asList("fcd590a55f5fec2a9b7e628187d6b8a8")); Arrays.asList("877de5b0cc61dc54636062df6399b978"));
executeTest("test MultiSample Phase1 indels with complicated records", spec4); executeTest("test MultiSample Phase1 indels with complicated records", spec4);
} }

View File

@ -42,12 +42,12 @@ public class ReadUtilsUnitTest extends BaseTest {
@Test @Test
public void testReducedReadPileupElement() { public void testReducedReadPileupElement() {
PileupElement readp = new PileupElement(read, 0); PileupElement readp = new PileupElement(read, 0, false);
PileupElement reducedreadp = new PileupElement(reducedRead, 0); PileupElement reducedreadp = new PileupElement(reducedRead, 0, false);
Assert.assertFalse(readp.isReducedRead()); Assert.assertFalse(readp.getRead().isReducedRead());
Assert.assertTrue(reducedreadp.isReducedRead()); Assert.assertTrue(reducedreadp.getRead().isReducedRead());
Assert.assertEquals(reducedreadp.getRepresentativeCount(), REDUCED_READ_COUNTS[0]); Assert.assertEquals(reducedreadp.getRepresentativeCount(), REDUCED_READ_COUNTS[0]);
Assert.assertEquals(reducedreadp.getQual(), readp.getQual()); Assert.assertEquals(reducedreadp.getQual(), readp.getQual());
} }