Fix for Kiran's sharding issue (Invalid GZIP header). General cleanup of the
Picard patch, including moving some of the Picard private classes we use into
the Picard public package.


git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@3087 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
hanna 2010-03-29 03:21:27 +00:00
parent a45ac220aa
commit 85037ab13f
27 changed files with 76 additions and 100 deletions

View File

@ -1,8 +1,6 @@
package org.broadinstitute.sting.gatk.datasources.shards;
import net.sf.samtools.Chunk;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.*;
import net.sf.picard.filter.SamRecordFilter;
import java.util.List;
@ -22,7 +20,7 @@ public interface BAMFormatAwareShard extends Shard {
* Get the list of chunks delimiting this shard.
* @return a list of chunks that contain data for this shard.
*/
public Map<SAMReaderID,List<Chunk>> getChunks();
public Map<SAMReaderID, BAMFileSpan> getFileSpans();
/**
* Returns true if this shard is meant to buffer reads, rather

View File

@ -1,8 +1,6 @@
package org.broadinstitute.sting.gatk.datasources.shards;
import net.sf.samtools.Chunk;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.*;
import net.sf.picard.filter.SamRecordFilter;
import java.util.*;
@ -28,7 +26,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
/**
* The data backing the next chunks to deliver to the traversal engine.
*/
private final Map<SAMReaderID,List<Chunk>> chunks;
private final Map<SAMReaderID,BAMFileSpan> fileSpans;
/**
* The reads making up this shard.
@ -46,9 +44,9 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
*/
private final Shard.ShardType shardType;
public BlockDelimitedReadShard(Reads sourceInfo, Map<SAMReaderID,List<Chunk>> chunks, SamRecordFilter filter, Shard.ShardType shardType) {
public BlockDelimitedReadShard(Reads sourceInfo, Map<SAMReaderID,BAMFileSpan> fileSpans, SamRecordFilter filter, Shard.ShardType shardType) {
this.sourceInfo = sourceInfo;
this.chunks = chunks;
this.fileSpans = fileSpans;
this.filter = filter;
this.shardType = shardType;
}
@ -58,6 +56,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* than just holding pointers to their locations.
* @return True if this shard can buffer reads. False otherwise.
*/
@Override
public boolean buffersReads() {
return true;
}
@ -66,6 +65,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* Returns true if the read buffer is currently full.
* @return True if this shard's buffer is full (and the shard can buffer reads).
*/
@Override
public boolean isBufferEmpty() {
return reads.size() == 0;
}
@ -74,6 +74,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* Returns true if the read buffer is currently full.
* @return True if this shard's buffer is full (and the shard can buffer reads).
*/
@Override
public boolean isBufferFull() {
return reads.size() > BlockDelimitedReadShardStrategy.MAX_READS;
}
@ -82,6 +83,7 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* Adds a read to the read buffer.
* @param read Add a read to the internal shard buffer.
*/
@Override
public void addRead(SAMRecord read) {
// DO NOT validate that the buffer is full. Paired read sharding will occasionally have to stuff another
// read or two into the buffer.
@ -92,10 +94,12 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* Creates an iterator over reads stored in this shard's read cache.
* @return
*/
@Override
public StingSAMIterator iterator() {
return StingSAMIteratorAdapter.adapt(sourceInfo,reads.iterator());
}
@Override
public SamRecordFilter getFilter() {
return filter;
}
@ -104,8 +108,9 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
* Get the list of chunks delimiting this shard.
* @return a list of chunks that contain data for this shard.
*/
public Map<SAMReaderID,List<Chunk>> getChunks() {
return Collections.unmodifiableMap(chunks);
@Override
public Map<SAMReaderID,BAMFileSpan> getFileSpans() {
return Collections.unmodifiableMap(fileSpans);
}
/**
@ -123,14 +128,11 @@ public class BlockDelimitedReadShard extends ReadShard implements BAMFormatAware
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
for(Map.Entry<SAMReaderID,List<Chunk>> entry: chunks.entrySet()) {
for(Map.Entry<SAMReaderID,BAMFileSpan> entry: fileSpans.entrySet()) {
sb.append(entry.getKey());
sb.append(": ");
for(Chunk chunk : entry.getValue()) {
sb.append(chunk);
sb.append(' ');
}
sb.append(';');
sb.append(entry.getValue());
sb.append(' ');
}
return sb.toString();
}

View File

@ -55,7 +55,7 @@ public class BlockDelimitedReadShardStrategy extends ReadShardStrategy {
/**
* Ending position of the last shard in the file.
*/
private Map<SAMReaderID,Chunk> position;
private Map<SAMReaderID,BAMFileSpan> position;
/**
* Create a new read shard strategy, loading read shards from the given BAM file.
@ -103,26 +103,18 @@ public class BlockDelimitedReadShardStrategy extends ReadShardStrategy {
}
public void advance() {
Map<SAMReaderID,List<Chunk>> shardPosition = new HashMap<SAMReaderID,List<Chunk>>();
Map<SAMReaderID,BAMFileSpan> shardPosition = new HashMap<SAMReaderID,BAMFileSpan>();
nextShard = null;
SamRecordFilter filter = null;
if(locations != null) {
Map<SAMReaderID,List<Chunk>> selectedReaders = new HashMap<SAMReaderID,List<Chunk>>();
Map<SAMReaderID,BAMFileSpan> selectedReaders = new HashMap<SAMReaderID,BAMFileSpan>();
while(selectedReaders.size() == 0 && currentFilePointer != null) {
shardPosition = currentFilePointer.chunks;
shardPosition = currentFilePointer.fileSpans;
for(SAMReaderID id: shardPosition.keySet()) {
List<Chunk> chunks = shardPosition.get(id);
List<Chunk> selectedChunks = new ArrayList<Chunk>();
Chunk filePosition = position.get(id);
for(Chunk chunk: chunks)
if(filePosition.getChunkStart() <= chunk.getChunkStart())
selectedChunks.add(chunk);
else if(filePosition.getChunkStart() > chunk.getChunkStart() && filePosition.getChunkStart() < chunk.getChunkEnd()) {
selectedChunks.add(new Chunk(filePosition.getChunkStart(),chunk.getChunkEnd()));
}
if(selectedChunks.size() > 0)
selectedReaders.put(id,selectedChunks);
BAMFileSpan fileSpans = shardPosition.get(id).removeBefore(position.get(id));
if(!fileSpans.isEmpty())
selectedReaders.put(id,fileSpans);
}
if(selectedReaders.size() > 0) {
@ -141,13 +133,7 @@ public class BlockDelimitedReadShardStrategy extends ReadShardStrategy {
}
}
else {
// TODO: This level of processing should not be necessary.
shardPosition = new HashMap<SAMReaderID,List<Chunk>>();
for(Map.Entry<SAMReaderID,Chunk> entry: position.entrySet())
shardPosition.put(entry.getKey(),Collections.singletonList(entry.getValue()));
filter = null;
BAMFormatAwareShard shard = new BlockDelimitedReadShard(dataSource.getReadsInfo(),shardPosition,filter,Shard.ShardType.READ);
BAMFormatAwareShard shard = new BlockDelimitedReadShard(dataSource.getReadsInfo(),position,filter,Shard.ShardType.READ);
dataSource.fillShard(shard);
nextShard = !shard.isBufferEmpty() ? shard : null;
}

View File

@ -4,9 +4,8 @@ import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.StingException;
import org.broadinstitute.sting.gatk.iterators.StingSAMIterator;
import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMReaderID;
import net.sf.samtools.Chunk;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.BAMFileSpan;
import net.sf.picard.filter.SamRecordFilter;
import java.util.List;
@ -45,7 +44,7 @@ public class IndexDelimitedLocusShard extends LocusShard implements BAMFormatAwa
/**
* A list of the chunks associated with this shard.
*/
private final Map<SAMReaderID,List<Chunk>> chunks;
private final Map<SAMReaderID, BAMFileSpan> fileSpans;
/**
* An IndexDelimitedLocusShard can be used either for LOCUS or LOCUS_INTERVAL shard types.
@ -56,23 +55,24 @@ public class IndexDelimitedLocusShard extends LocusShard implements BAMFormatAwa
/**
* Create a new locus shard, divided by index.
* @param intervals List of intervals to process.
* @param chunks Chunks associated with that interval.
* @param fileSpans File spans associated with that interval.
* @param shardType Type of the shard; must be either LOCUS or LOCUS_INTERVAL.
*/
IndexDelimitedLocusShard(List<GenomeLoc> intervals, Map<SAMReaderID,List<Chunk>> chunks, ShardType shardType) {
IndexDelimitedLocusShard(List<GenomeLoc> intervals, Map<SAMReaderID,BAMFileSpan> fileSpans, ShardType shardType) {
super(intervals);
this.chunks = chunks;
this.fileSpans = fileSpans;
if(shardType != ShardType.LOCUS && shardType != ShardType.LOCUS_INTERVAL)
throw new StingException("Attempted to create an IndexDelimitedLocusShard with invalid shard type: " + shardType);
this.shardType = shardType;
}
/**
* Gets the chunks associated with this locus shard.
* @return A list of the chunks to use when retrieving locus data.
* Gets the file spans associated with this locus shard.
* @return A list of the file spans to use when retrieving locus data.
*/
public Map<SAMReaderID,List<Chunk>> getChunks() {
return chunks;
@Override
public Map<SAMReaderID,BAMFileSpan> getFileSpans() {
return fileSpans;
}
/**
@ -80,36 +80,42 @@ public class IndexDelimitedLocusShard extends LocusShard implements BAMFormatAwa
* than just holding pointers to their locations.
* @return True if this shard can buffer reads. False otherwise.
*/
@Override
public boolean buffersReads() { return false; }
/**
* Returns true if the read buffer is currently full.
* @return True if this shard's buffer is full (and the shard can buffer reads).
*/
@Override
public boolean isBufferEmpty() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
/**
* Returns true if the read buffer is currently full.
* @return True if this shard's buffer is full (and the shard can buffer reads).
*/
@Override
public boolean isBufferFull() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
/**
* Adds a read to the read buffer.
* @param read Add a read to the internal shard buffer.
*/
@Override
public void addRead(SAMRecord read) { throw new UnsupportedOperationException("This shard does not buffer reads."); }
/**
* Gets the iterator over the elements cached in the shard.
* @return
*/
@Override
public StingSAMIterator iterator() { throw new UnsupportedOperationException("This shard does not buffer reads."); }
/**
* Gets a filter testing for overlap of this read with the given shard.
* @return A filter capable of filtering out reads outside a given shard.
*/
@Override
public SamRecordFilter getFilter() {
return new ReadOverlapFilter(loci);
}

View File

@ -11,9 +11,9 @@ import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMReaderID;
import java.util.*;
import net.sf.samtools.Chunk;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMSequenceRecord;
import net.sf.samtools.BAMFileSpan;
/*
* Copyright (c) 2009 The Broad Institute
@ -126,8 +126,8 @@ public class IndexDelimitedLocusShardStrategy implements ShardStrategy {
*/
public IndexDelimitedLocusShard next() {
FilePointer nextFilePointer = filePointerIterator.next();
Map<SAMReaderID,List<Chunk>> chunksBounding = nextFilePointer.chunks != null ? nextFilePointer.chunks : null;
return new IndexDelimitedLocusShard(nextFilePointer.locations,chunksBounding,Shard.ShardType.LOCUS_INTERVAL);
Map<SAMReaderID, BAMFileSpan> fileSpansBounding = nextFilePointer.fileSpans != null ? nextFilePointer.fileSpans : null;
return new IndexDelimitedLocusShard(nextFilePointer.locations,fileSpansBounding,Shard.ShardType.LOCUS_INTERVAL);
}
/** we don't support the remove command */

View File

@ -172,7 +172,7 @@ public class IntervalSharder {
for(SAMReaderID id: dataSource.getReaderIDs()) {
PreloadedBAMFileIndex index = readerToIndexMap.get(id);
for(FilePointer filePointer: filePointers)
filePointer.addChunks(id,index.getChunksOverlapping(filePointer.overlap.getBin(id)));
filePointer.addFileSpans(id,index.getChunksOverlapping(filePointer.overlap.getBin(id)));
index.close();
}
@ -372,7 +372,7 @@ public class IntervalSharder {
* Represents a small section of a BAM file, and every associated interval.
*/
class FilePointer {
protected final Map<SAMReaderID,List<Chunk>> chunks = new HashMap<SAMReaderID,List<Chunk>>();
protected final Map<SAMReaderID,BAMFileSpan> fileSpans = new HashMap<SAMReaderID,BAMFileSpan>();
protected final String referenceSequence;
protected final BAMOverlap overlap;
protected final List<GenomeLoc> locations;
@ -393,8 +393,8 @@ class FilePointer {
locations.add(location);
}
public void addChunks(SAMReaderID id, List<Chunk> chunks) {
this.chunks.put(id,chunks);
public void addFileSpans(SAMReaderID id, BAMFileSpan fileSpan) {
this.fileSpans.put(id,fileSpan);
}
}

View File

@ -6,7 +6,6 @@ import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.iterators.StingSAMIterator;
import org.broadinstitute.sting.gatk.iterators.StingSAMIteratorAdapter;
import org.broadinstitute.sting.utils.StingException;
import org.broadinstitute.sting.utils.GenomeLoc;
import net.sf.samtools.*;
import net.sf.samtools.util.CloseableIterator;
import net.sf.picard.sam.SamFileHeaderMerger;
@ -54,7 +53,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
/**
* How far along is each reader?
*/
private final Map<SAMReaderID,Chunk> readerPositions = new HashMap<SAMReaderID,Chunk>();
private final Map<SAMReaderID,BAMFileSpan> readerPositions = new HashMap<SAMReaderID,BAMFileSpan>();
/**
* Create a new block-aware SAM data source given the supplied read metadata.
@ -81,7 +80,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
}
initializeReaderPositions(readers);
SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(readers.values(),SAMFileHeader.SortOrder.coordinate,true);
mergedHeader = headerMerger.getMergedHeader();
hasReadGroupCollisions = headerMerger.hasReadGroupCollisions();
@ -139,7 +138,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
* Retrieves the current position within the BAM file.
* @return A mapping of reader to current position.
*/
public Map<SAMReaderID,Chunk> getCurrentPosition() {
public Map<SAMReaderID,BAMFileSpan> getCurrentPosition() {
return readerPositions;
}
@ -183,7 +182,7 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
* @param read The read to add to the shard.
*/
private void addReadToBufferingShard(BAMFormatAwareShard shard,SAMReaderID id,SAMRecord read) {
Chunk endChunk = new Chunk(read.getCoordinates().getChunkEnd(),Long.MAX_VALUE);
BAMFileSpan endChunk = read.getFilePointer().getFilePointerFollowing();
shard.addRead(read);
readerPositions.put(id,endChunk);
}
@ -202,11 +201,15 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
throw new StingException("Unable to find id for reader associated with read " + read.getReadName());
}
/**
* Initialize the current reader positions
* @param readers
*/
private void initializeReaderPositions(SAMReaders readers) {
for(SAMReaderID id: getReaderIDs())
readerPositions.put(id,readers.getReader(id).getCurrentPosition());
readerPositions.put(id,readers.getReader(id).getStartOfDataSegment());
}
public StingSAMIterator seek(Shard shard) {
// todo: refresh monolithic sharding implementation
if(shard instanceof MonolithicShard)
@ -236,9 +239,9 @@ public class BlockDrivenSAMDataSource extends SAMDataSource {
private StingSAMIterator getIterator(SAMReaders readers, BAMFormatAwareShard shard, boolean enableVerification) {
Map<SAMFileReader,CloseableIterator<SAMRecord>> readerToIteratorMap = new HashMap<SAMFileReader,CloseableIterator<SAMRecord>>();
for(SAMReaderID id: getReaderIDs()) {
if(shard.getChunks().get(id) == null)
if(shard.getFileSpans().get(id) == null)
continue;
CloseableIterator<SAMRecord> iterator = readers.getReader(id).iterator(shard.getChunks().get(id));
CloseableIterator<SAMRecord> iterator = readers.getReader(id).iterator(shard.getFileSpans().get(id));
if(shard.getFilter() != null)
iterator = new FilteringIterator(iterator,shard.getFilter());
readerToIteratorMap.put(readers.getReader(id),iterator);

View File

@ -6,6 +6,7 @@ import org.apache.log4j.Logger;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.genotype.Variation;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.picard.util.IntervalTree;
import net.sf.samtools.util.StringUtil;
import java.util.*;
@ -14,8 +15,6 @@ import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import edu.mit.broad.picard.util.IntervalTree;
/**
* Created by IntelliJ IDEA.
* User: depristo

View File

@ -22,9 +22,9 @@ import java.io.File;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.util.StringUtil;
import net.sf.picard.reference.ReferenceSequence;
import edu.mit.broad.picard.util.Interval;
import edu.mit.broad.picard.util.OverlapDetector;
import edu.mit.broad.picard.directed.IntervalList;
import net.sf.picard.util.Interval;
import net.sf.picard.util.OverlapDetector;
import net.sf.picard.util.IntervalList;
/**
* Given intervals corresponding to targets or baits in a hybrid selection experiment, this walker gives the following interval-by-interval data:

View File

@ -4,9 +4,6 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import java.io.*;
import java.util.*;

View File

@ -7,7 +7,6 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.Utils;

View File

@ -4,13 +4,11 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import java.io.*;
import java.util.*;
import net.sf.picard.PicardException;
import net.sf.picard.util.Interval;
import net.sf.picard.io.IoUtil;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.picard.reference.ReferenceSequenceFile;

View File

@ -4,9 +4,6 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import java.io.*;
import java.util.*;
import java.util.zip.*;

View File

@ -4,7 +4,6 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.Utils;
@ -14,6 +13,7 @@ import java.io.*;
import java.util.*;
import net.sf.picard.PicardException;
import net.sf.picard.util.Interval;
import net.sf.picard.io.IoUtil;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.picard.reference.ReferenceSequenceFile;

View File

@ -4,7 +4,6 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.Utils;

View File

@ -4,13 +4,11 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import java.io.*;
import java.util.*;
import net.sf.picard.PicardException;
import net.sf.picard.util.Interval;
import net.sf.picard.io.IoUtil;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.picard.reference.ReferenceSequenceFile;

View File

@ -4,14 +4,12 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import java.io.*;
import java.util.*;
import java.lang.*;
import net.sf.picard.PicardException;
import net.sf.picard.util.Interval;
import net.sf.picard.io.IoUtil;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.picard.reference.ReferenceSequenceFile;

View File

@ -4,7 +4,6 @@ import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.vcf.*;
import edu.mit.broad.picard.util.Interval;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
@ -14,6 +13,7 @@ import java.util.*;
import java.util.zip.*;
import net.sf.picard.PicardException;
import net.sf.picard.util.Interval;
import net.sf.picard.io.IoUtil;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.picard.reference.ReferenceSequenceFile;

View File

@ -1,7 +1,7 @@
package org.broadinstitute.sting.utils;
import edu.mit.broad.picard.directed.IntervalList;
import edu.mit.broad.picard.util.Interval;
import net.sf.picard.util.IntervalList;
import net.sf.picard.util.Interval;
import net.sf.picard.reference.ReferenceSequenceFile;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.SAMSequenceDictionary;

View File

@ -341,5 +341,5 @@ public class GATKSAMRecord extends SAMRecord {
public String toString() { return mRecord.toString(); }
public Chunk getCoordinates() { return mRecord.getCoordinates(); }
public BAMFileSpan getFilePointer() { return mRecord.getFilePointer(); }
}

View File

@ -2,16 +2,12 @@
<package name="picard-private-parts">
<executable name="picard-private-parts">
<dependencies>
<class name="edu.mit.broad.picard.directed.IntervalList" />
<class name="edu.mit.broad.picard.genotype.DiploidGenotype" />
<class name="edu.mit.broad.picard.genotype.geli.GeliFileReader" />
<class name="edu.mit.broad.picard.genotype.geli.GeliFileWriter" />
<class name="edu.mit.broad.picard.genotype.geli.GenotypeLikelihoods" />
<class name="edu.mit.broad.picard.util.BasicTextFileParser" />
<class name="edu.mit.broad.picard.illumina.parser.IlluminaUtil" />
<class name="edu.mit.broad.picard.util.Interval" />
<class name="edu.mit.broad.picard.util.IntervalTree" />
<class name="edu.mit.broad.picard.util.OverlapDetector" />
<class name="edu.mit.broad.picard.util.PasteParser" />
<class name="edu.mit.broad.picard.variation.KnownVariantCodecV2" />
<class name="edu.mit.broad.picard.variation.KnownVariantCodec" />

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0">
<info organisation="edu.mit.broad" module="picard-private-parts" revision="1333-sharding-4" status="integration" publication="20100324095800" />
<info organisation="edu.mit.broad" module="picard-private-parts" revision="1333-sharding-5" status="integration" publication="20100326102100" />
</ivy-module>

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0">
<info organisation="net.sf" module="picard" revision="1.16.364-sharding" status="release" />
<info organisation="net.sf" module="picard" revision="1.16.365-sharding" status="release" />
</ivy-module>

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0">
<info organisation="net.sf" module="sam" revision="1.16.364-sharding" status="release" />
<info organisation="net.sf" module="sam" revision="1.16.365-sharding" status="release" />
</ivy-module>