Revisions to take into account finalization of the Picard patch: naming changes and a better definition of public interfaces. This won't be the last Picard patch, but it should be the last big one.


git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@3096 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
hanna 2010-03-30 19:28:14 +00:00
parent b00d2bf2bc
commit 400684542c
15 changed files with 41 additions and 41 deletions

View File

@ -74,7 +74,7 @@ public class BAMFileStat extends CommandLineProgram {
inspector.inspect(System.out,null,null);
}
private class BAMFileIndexContentInspector extends PreloadedBAMFileIndex {
private class BAMFileIndexContentInspector extends CachingBAMFileIndex {
public BAMFileIndexContentInspector(File bamFileIndex) {
super(bamFileIndex);
}

View File

@ -20,7 +20,7 @@ public interface BAMFormatAwareShard extends Shard {
* Get the list of chunks delimiting this shard.
* @return a list of chunks that contain data for this shard.
*/
public Map<SAMReaderID, BAMFileSpan> getFileSpans();
public Map<SAMReaderID,SAMFileSpan> getFileSpans();
/**
* Returns true if this shard is meant to buffer reads, rather

View File

@ -26,7 +26,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
/**
* The data backing the next chunks to deliver to the traversal engine.
*/
private final Map<SAMReaderID,BAMFileSpan> fileSpans;
private final Map<SAMReaderID,SAMFileSpan> fileSpans;
/**
* The reads making up this shard.
@ -44,7 +44,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
*/
private final Shard.ShardType shardType;
public BlockDelimitedReadShard(Reads sourceInfo, Map<SAMReaderID,BAMFileSpan> fileSpans, SamRecordFilter filter, Shard.ShardType shardType) {
public BlockDelimitedReadShard(Reads sourceInfo, Map<SAMReaderID,SAMFileSpan> fileSpans, SamRecordFilter filter, Shard.ShardType shardType) {
this.sourceInfo = sourceInfo;
this.fileSpans = fileSpans;
this.filter = filter;
@ -109,7 +109,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* @return a list of chunks that contain data for this shard.
*/
@Override
public Map<SAMReaderID,BAMFileSpan> getFileSpans() {
public Map<SAMReaderID,SAMFileSpan> getFileSpans() {
return Collections.unmodifiableMap(fileSpans);
}
@ -128,7 +128,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for(Map.Entry<SAMReaderID,BAMFileSpan> entry: fileSpans.entrySet()) {
for(Map.Entry<SAMReaderID,SAMFileSpan> entry: fileSpans.entrySet()) {
sb.append(entry.getKey());
sb.append(": ");
sb.append(entry.getValue());

View File

@ -55,7 +55,7 @@ public class BlockDelimitedReadShardStrategy extends ReadShardStrategy {
/**
* Ending position of the last shard in the file.
*/
private Map<SAMReaderID,BAMFileSpan> position;
private Map<SAMReaderID,SAMFileSpan> position;
/**
* Create a new read shard strategy, loading read shards from the given BAM file.
@ -103,18 +103,18 @@ public class BlockDelimitedReadShardStrategy extends ReadShardStrategy {
}
public void advance() {
Map<SAMReaderID,BAMFileSpan> shardPosition = new HashMap<SAMReaderID,BAMFileSpan>();
Map<SAMReaderID,SAMFileSpan> shardPosition = new HashMap<SAMReaderID,SAMFileSpan>();
nextShard = null;
SamRecordFilter filter = null;
if(locations != null) {
Map<SAMReaderID,BAMFileSpan> selectedReaders = new HashMap<SAMReaderID,BAMFileSpan>();
Map<SAMReaderID,SAMFileSpan> selectedReaders = new HashMap<SAMReaderID,SAMFileSpan>();
while(selectedReaders.size() == 0 && currentFilePointer != null) {
shardPosition = currentFilePointer.fileSpans;
for(SAMReaderID id: shardPosition.keySet()) {
BAMFileSpan fileSpans = shardPosition.get(id).removeBefore(position.get(id));
if(!fileSpans.isEmpty())
selectedReaders.put(id,fileSpans);
SAMFileSpan fileSpan = shardPosition.get(id).removeContentsBefore(position.get(id));
if(!fileSpan.isEmpty())
selectedReaders.put(id,fileSpan);
}
if(selectedReaders.size() > 0) {

View File

@ -5,7 +5,7 @@ import org.broadinstitute.sting.utils.StingException;
import org.broadinstitute.sting.gatk.iterators.StingSAMIterator;
import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMReaderID;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.BAMFileSpan;
import net.sf.samtools.SAMFileSpan;
import net.sf.picard.filter.SamRecordFilter;
import java.util.List;
@ -44,7 +44,7 @@ public class IndexDelimitedLocusShard extends LocusShard implements BAMFormatAwa
/**
* A list of the chunks associated with this shard.
*/
private final Map<SAMReaderID, BAMFileSpan> fileSpans;
private final Map<SAMReaderID,SAMFileSpan> fileSpans;
/**
* An IndexDelimitedLocusShard can be used either for LOCUS or LOCUS_INTERVAL shard types.
@ -58,7 +58,7 @@ public class IndexDelimitedLocusShard extends LocusShard implements BAMFormatAwa
* @param fileSpans File spans associated with that interval.
* @param shardType Type of the shard; must be either LOCUS or LOCUS_INTERVAL.
*/
IndexDelimitedLocusShard(List<GenomeLoc> intervals, Map<SAMReaderID,BAMFileSpan> fileSpans, ShardType shardType) {
IndexDelimitedLocusShard(List<GenomeLoc> intervals, Map<SAMReaderID,SAMFileSpan> fileSpans, ShardType shardType) {
super(intervals);
this.fileSpans = fileSpans;
if(shardType != ShardType.LOCUS && shardType != ShardType.LOCUS_INTERVAL)
@ -71,7 +71,7 @@ public class IndexDelimitedLocusShard extends LocusShard implements BAMFormatAwa
* @return A list of the file spans to use when retrieving locus data.
*/
@Override
public Map<SAMReaderID,BAMFileSpan> getFileSpans() {
public Map<SAMReaderID,SAMFileSpan> getFileSpans() {
return fileSpans;
}

View File

@ -13,7 +13,7 @@ import java.util.*;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMSequenceRecord;
import net.sf.samtools.BAMFileSpan;
import net.sf.samtools.SAMFileSpan;
/*
* Copyright (c) 2009 The Broad Institute
@ -126,7 +126,7 @@ public class IndexDelimitedLocusShardStrategy implements ShardStrategy {
*/
public IndexDelimitedLocusShard next() {
FilePointer nextFilePointer = filePointerIterator.next();
Map<SAMReaderID, BAMFileSpan> fileSpansBounding = nextFilePointer.fileSpans != null ? nextFilePointer.fileSpans : null;
Map<SAMReaderID,SAMFileSpan> fileSpansBounding = nextFilePointer.fileSpans != null ? nextFilePointer.fileSpans : null;
return new IndexDelimitedLocusShard(nextFilePointer.locations,fileSpansBounding,Shard.ShardType.LOCUS_INTERVAL);
}

View File

@ -75,11 +75,11 @@ public class IntervalSharder {
FilePointer lastFilePointer = null;
BAMOverlap lastBAMOverlap = null;
Map<SAMReaderID,PreloadedBAMFileIndex> readerToIndexMap = new HashMap<SAMReaderID,PreloadedBAMFileIndex>();
Map<SAMReaderID,CachingBAMFileIndex> readerToIndexMap = new HashMap<SAMReaderID,CachingBAMFileIndex>();
BinMergingIterator binMerger = new BinMergingIterator();
for(SAMReaderID id: dataSource.getReaderIDs()) {
final SAMSequenceRecord referenceSequence = dataSource.getHeader(id).getSequence(contig);
final PreloadedBAMFileIndex index = dataSource.getIndex(id);
final CachingBAMFileIndex index = dataSource.getIndex(id);
binMerger.addReader(id,
index,
referenceSequence.getSequenceIndex(),
@ -170,7 +170,7 @@ public class IntervalSharder {
// Lookup the locations for every file pointer in the index.
for(SAMReaderID id: dataSource.getReaderIDs()) {
PreloadedBAMFileIndex index = readerToIndexMap.get(id);
CachingBAMFileIndex index = readerToIndexMap.get(id);
for(FilePointer filePointer: filePointers)
filePointer.addFileSpans(id,index.getChunksOverlapping(filePointer.overlap.getBin(id)));
index.close();
@ -183,7 +183,7 @@ public class IntervalSharder {
private PriorityQueue<BinQueueState> binQueue = new PriorityQueue<BinQueueState>();
private Queue<BAMOverlap> pendingOverlaps = new LinkedList<BAMOverlap>();
public void addReader(final SAMReaderID id, final PreloadedBAMFileIndex index, final int referenceSequence, Iterator<Bin> bins) {
public void addReader(final SAMReaderID id, final CachingBAMFileIndex index, final int referenceSequence, Iterator<Bin> bins) {
binQueue.add(new BinQueueState(id,index,referenceSequence,new LowestLevelBinFilteringIterator(index,bins)));
}
@ -276,11 +276,11 @@ public class IntervalSharder {
private class ReaderBin {
public final SAMReaderID id;
public final PreloadedBAMFileIndex index;
public final CachingBAMFileIndex index;
public final int referenceSequence;
public final Bin bin;
public ReaderBin(final SAMReaderID id, final PreloadedBAMFileIndex index, final int referenceSequence, final Bin bin) {
public ReaderBin(final SAMReaderID id, final CachingBAMFileIndex index, final int referenceSequence, final Bin bin) {
this.id = id;
this.index = index;
this.referenceSequence = referenceSequence;
@ -298,11 +298,11 @@ public class IntervalSharder {
private class BinQueueState implements Comparable<BinQueueState> {
public final SAMReaderID id;
public final PreloadedBAMFileIndex index;
public final CachingBAMFileIndex index;
public final int referenceSequence;
public final PeekableIterator<Bin> bins;
public BinQueueState(final SAMReaderID id, final PreloadedBAMFileIndex index, final int referenceSequence, final Iterator<Bin> bins) {
public BinQueueState(final SAMReaderID id, final CachingBAMFileIndex index, final int referenceSequence, final Iterator<Bin> bins) {
this.id = id;
this.index = index;
this.referenceSequence = referenceSequence;
@ -334,12 +334,12 @@ public class IntervalSharder {
* Filters out bins not at the lowest level in the tree.
*/
private static class LowestLevelBinFilteringIterator implements Iterator<Bin> {
private PreloadedBAMFileIndex index;
private CachingBAMFileIndex index;
private Iterator<Bin> wrappedIterator;
private Bin nextBin;
public LowestLevelBinFilteringIterator(final PreloadedBAMFileIndex index, Iterator<Bin> iterator) {
public LowestLevelBinFilteringIterator(final CachingBAMFileIndex index, Iterator<Bin> iterator) {
this.index = index;
this.wrappedIterator = iterator;
advance();
@ -372,7 +372,7 @@ public class IntervalSharder {
* Represents a small section of a BAM file, and every associated interval.
*/
class FilePointer {
protected final Map<SAMReaderID,BAMFileSpan> fileSpans = new HashMap<SAMReaderID,BAMFileSpan>();
protected final Map<SAMReaderID,SAMFileSpan> fileSpans = new HashMap<SAMReaderID,SAMFileSpan>();
protected final String referenceSequence;
protected final BAMOverlap overlap;
protected final List<GenomeLoc> locations;
@ -393,7 +393,7 @@ class FilePointer {
locations.add(location);
}
public void addFileSpans(SAMReaderID id, BAMFileSpan fileSpan) {
public void addFileSpans(SAMReaderID id, SAMFileSpan fileSpan) {
this.fileSpans.put(id,fileSpan);
}
}

View File

@ -53,7 +53,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
/**
* How far along is each reader?
*/
private final Map<SAMReaderID,BAMFileSpan> readerPositions = new HashMap<SAMReaderID,BAMFileSpan>();
private final Map<SAMReaderID,SAMFileSpan> readerPositions = new HashMap<SAMReaderID,SAMFileSpan>();
/**
* Create a new block-aware SAM data source given the supplied read metadata.
@ -129,16 +129,16 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
* @param id Id of the reader.
* @return The index. Will preload the index if necessary.
*/
public PreloadedBAMFileIndex getIndex(final SAMReaderID id) {
public CachingBAMFileIndex getIndex(final SAMReaderID id) {
SAMReaders readers = resourcePool.getReadersWithoutLocking();
return readers.getReader(id).getIndex(PreloadedBAMFileIndex.class);
return readers.getReader(id).getIndex(CachingBAMFileIndex.class);
}
/**
* Retrieves the current position within the BAM file.
* @return A mapping of reader to current position.
*/
public Map<SAMReaderID,BAMFileSpan> getCurrentPosition() {
public Map<SAMReaderID,SAMFileSpan> getCurrentPosition() {
return readerPositions;
}
@ -182,7 +182,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
* @param read The read to add to the shard.
*/
private void addReadToBufferingShard(BAMFormatAwareShard shard,SAMReaderID id,SAMRecord read) {
BAMFileSpan endChunk = read.getFilePointer().getFilePointerFollowing();
SAMFileSpan endChunk = read.getFilePointer().getContentsFollowing();
shard.addRead(read);
readerPositions.put(id,endChunk);
}
@ -207,7 +207,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
*/
private void initializeReaderPositions(SAMReaders readers) {
for(SAMReaderID id: getReaderIDs())
readerPositions.put(id,readers.getReader(id).getStartOfDataSegment());
readerPositions.put(id,readers.getReader(id).getFilePointerSpanningReads());
}
public StingSAMIterator seek(Shard shard) {
@ -386,7 +386,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
*/
public SAMReaders(Reads sourceInfo) {
for(File readsFile: sourceInfo.getReadsFiles()) {
SAMFileReader reader = new SAMFileReader(readsFile,true);
SAMFileReader reader = new SAMFileReader(readsFile,CachingBAMFileIndex.class,true);
reader.setValidationStringency(sourceInfo.getValidationStringency());
// If no read group is present, hallucinate one.

View File

@ -341,5 +341,5 @@ public class GATKSAMRecord extends SAMRecord {
public String toString() { return mRecord.toString(); }
public BAMFileSpan getFilePointer() { return mRecord.getFilePointer(); }
public SAMFileSpan getFilePointer() { return mRecord.getFilePointer(); }
}

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0">
<info organisation="edu.mit.broad" module="picard-private-parts" revision="1333-sharding-5" status="integration" publication="20100326102100" />
<info organisation="edu.mit.broad" module="picard-private-parts" revision="1333-sharding-6" status="integration" publication="20100330130700" />
</ivy-module>

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0">
<info organisation="net.sf" module="picard" revision="1.16.365-sharding" status="release" />
<info organisation="net.sf" module="picard" revision="1.16.366-sharding" status="release" />
</ivy-module>

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0">
<info organisation="net.sf" module="sam" revision="1.16.365-sharding" status="release" />
<info organisation="net.sf" module="sam" revision="1.16.366-sharding" status="release" />
</ivy-module>