Fixed locus pile-up limiting problem

git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@1505 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
aaron 2009-09-02 16:56:44 +00:00
parent d8aff9a925
commit 0e6feff8f2
1 changed file with 48 additions and 46 deletions

View File

@ -60,6 +60,7 @@ public class LocusIteratorByHanger extends LocusIterator {
final boolean DEBUG = false;
boolean justCleared = false;
private Reads readInfo;
// -----------------------------------------------------------------------------------------------------------------
//
// constructors and other basic operations
@ -130,18 +131,28 @@ public class LocusIteratorByHanger extends LocusIterator {
}
}
protected void hangRead(final SAMRecord read) { protected boolean hangRead(final SAMRecord read, final int maximumPileupSize, boolean warned) {
GenomeLoc readLoc = GenomeLocParser.createGenomeLoc(read);
for (AlignmentBlock block : read.getAlignmentBlocks()) {
if (DEBUG) logger.debug(String.format("Processing block %s len=%d", block, block.getLength()));
for (int i = 0; i < block.getLength(); i++) {
// check to see if we've exceeded the maximum number of reads in the pile-up
GenomeLoc offset = GenomeLocParser.createGenomeLoc(readLoc.getContigIndex(), block.getReferenceStart() + i);
int hangerSize = (readHanger.hasLocation(offset)) ? readHanger.getHanger(offset).size() : -1;
if (hangerSize >= maximumPileupSize) {
if (!warned) {
warned = true;
Utils.warnUser("Unable to add a read, we're over the hanger limit of " + maximumPileupSize + " at location " + readHanger.getLeftLoc());
}
} else {
readHanger.expandingPut(offset, read);
offsetHanger.expandingPut(offset, block.getReadStart() + i - 1);
}
if (DEBUG) logger.debug(String.format(" # Added %s", offset));
}
}
return warned; // did we warn the user about this location?
}
private final boolean currentPositionIsFullyCovered(final GenomeLoc nextReadInStreamLoc) {
@ -179,7 +190,7 @@ public class LocusIteratorByHanger extends LocusIterator {
logger.debug(String.format("entering expandWindow..., hasNext=%b", it.hasNext()));
printState();
}
boolean warned = false; // warn them once per locus Boolean warned = false; // warn them once per locus
while (it.hasNext()) {
if (DEBUG) {
logger.debug(String.format("Expanding window"));
@ -204,17 +215,8 @@ public class LocusIteratorByHanger extends LocusIterator {
// We've collected up enough reads
it.pushback(read);
break;
} } else
else warned = hangRead(read,maximumPileupSize,warned);
// check to see if we've exceeded the maximum number of reads in the pile-up
if (readHanger.size() < maximumPileupSize)
hangRead(read);
else {
// if we haven't warned the user for this locus, do so now
if (!warned)
Utils.warnUser("Unable to add a read, we're over the hanger limit of " + maximumPileupSize + " at location " + readHanger.getLeftLoc());
warned = true;
}
}
}