Merge pull request #381 from broadinstitute/mm_rev_picard_to_get_tribble_updates

Adaptations to accommodate Tribble API changes.
This commit is contained in:
Eric Banks 2013-08-19 18:31:02 -07:00
commit 6663d48ffe
27 changed files with 175 additions and 169 deletions

View File

@ -47,7 +47,7 @@
package org.broadinstitute.sting.gatk.walkers.haplotypecaller; package org.broadinstitute.sting.gatk.walkers.haplotypecaller;
import net.sf.picard.reference.IndexedFastaSequenceFile; import net.sf.picard.reference.IndexedFastaSequenceFile;
import org.broad.tribble.readers.AsciiLineReader; import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.readers.PositionalBufferedStream; import org.broad.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.WalkerTest;
import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLoc;
@ -190,9 +190,9 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest {
// confirm that the call is the correct one // confirm that the call is the correct one
final VCFCodec codec = new VCFCodec(); final VCFCodec codec = new VCFCodec();
final FileInputStream s = new FileInputStream(outputVCF); final FileInputStream s = new FileInputStream(outputVCF);
final AsciiLineReader lineReader = new AsciiLineReader(new PositionalBufferedStream(s)); final LineIterator lineIterator = codec.makeSourceFromStream(new PositionalBufferedStream(s));
codec.readHeader(lineReader); codec.readHeader(lineIterator);
final String line = lineReader.readLine(); final String line = lineIterator.next();
Assert.assertFalse(line == null); Assert.assertFalse(line == null);
final VariantContext vc = codec.decode(line); final VariantContext vc = codec.decode(line);
Assert.assertTrue(vc.isBiallelic()); Assert.assertTrue(vc.isBiallelic());

View File

@ -46,16 +46,20 @@
package org.broadinstitute.sting.gatk.walkers.variantutils; package org.broadinstitute.sting.gatk.walkers.variantutils;
import org.broad.tribble.readers.AsciiLineReader;
import org.broad.tribble.readers.PositionalBufferedStream; import org.broad.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.variant.vcf.*; import org.broadinstitute.variant.vcf.VCFCodec;
import org.broadinstitute.variant.vcf.VCFHeader;
import org.broadinstitute.variant.vcf.VCFHeaderLine;
import org.broadinstitute.variant.vcf.VCFUtils;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.Test; import org.testng.annotations.Test;
import java.io.StringBufferInputStream; import java.io.StringBufferInputStream;
import java.util.*; import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/** /**
* test out pieces of the combine variants code * test out pieces of the combine variants code
@ -154,7 +158,8 @@ public class CombineVariantsUnitTest {
private VCFHeader createHeader(String headerStr) { private VCFHeader createHeader(String headerStr) {
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader head = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(headerStr)))); VCFHeader head = null;
head = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(headerStr))));
return head; return head;
} }

View File

@ -46,25 +46,20 @@
package org.broadinstitute.sting.gatk.walkers.variantutils; package org.broadinstitute.sting.gatk.walkers.variantutils;
import net.sf.picard.reference.ReferenceSequenceFile;
import org.broad.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile; import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile;
import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.variant.variantcontext.*;
import org.broadinstitute.variant.variantcontext.Allele;
import org.broadinstitute.variant.variantcontext.Genotype;
import org.broadinstitute.variant.variantcontext.GenotypeBuilder;
import org.broadinstitute.variant.variantcontext.GenotypeType;
import org.broadinstitute.variant.variantcontext.VariantContext;
import org.broadinstitute.variant.variantcontext.VariantContextBuilder;
import org.broadinstitute.variant.vcf.VCFCodec; import org.broadinstitute.variant.vcf.VCFCodec;
import org.broadinstitute.variant.vcf.VCFHeader; import org.broadinstitute.variant.vcf.VCFHeader;
import org.testng.annotations.Test;
import org.broad.tribble.readers.AsciiLineReader;
import org.broad.tribble.readers.PositionalBufferedStream;
import org.testng.Assert; import org.testng.Assert;
import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
@ -72,7 +67,6 @@ import java.io.StringBufferInputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import net.sf.picard.reference.ReferenceSequenceFile;
public class ConcordanceMetricsUnitTest extends BaseTest { public class ConcordanceMetricsUnitTest extends BaseTest {
@ -139,8 +133,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
VariantContext eval = data.getFirst(); VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond(); VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
metrics.update(eval,truth); metrics.update(eval,truth);
Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2); Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2);
@ -189,8 +183,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
VariantContext eval = data.getFirst(); VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond(); VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
metrics.update(eval,truth); metrics.update(eval,truth);
Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2); Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2);
@ -209,8 +203,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
eval = data.getSecond(); eval = data.getSecond();
truth = data.getFirst(); truth = data.getFirst();
codec = new VCFCodec(); codec = new VCFCodec();
evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
metrics = new ConcordanceMetrics(evalHeader,compHeader); metrics = new ConcordanceMetrics(evalHeader,compHeader);
metrics.update(eval,truth); metrics.update(eval,truth);
Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2); Assert.assertEquals(eval.getGenotype("test1_sample2").getType().ordinal(), 2);
@ -264,8 +258,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
VariantContext eval = data.getFirst(); VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond(); VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
metrics.update(eval,truth); metrics.update(eval,truth);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample1").getnMismatchingAlt(),1); Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample1").getnMismatchingAlt(),1);
@ -317,8 +311,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
VariantContext eval = data.getFirst(); VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond(); VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
metrics.update(eval,truth); metrics.update(eval,truth);
Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),0); Assert.assertEquals(metrics.getGenotypeConcordance("test1_sample2").getnMismatchingAlt(),0);
@ -366,8 +360,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
VariantContext eval = data.getFirst(); VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond(); VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
metrics.update(eval,truth); metrics.update(eval,truth);
Assert.assertTrue(eval.getGenotype("test1_sample2").getType().equals(GenotypeType.UNAVAILABLE)); Assert.assertTrue(eval.getGenotype("test1_sample2").getType().equals(GenotypeType.UNAVAILABLE));
@ -520,8 +514,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
List<Pair<VariantContext,VariantContext>> data = getData6(); List<Pair<VariantContext,VariantContext>> data = getData6();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
for ( Pair<VariantContext,VariantContext> contextPair : data ) { for ( Pair<VariantContext,VariantContext> contextPair : data ) {
@ -554,8 +548,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
VariantContext eval = data.getFirst(); VariantContext eval = data.getFirst();
VariantContext truth = data.getSecond(); VariantContext truth = data.getSecond();
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_1_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
int[][] table = metrics.getOverallGenotypeConcordance().getTable(); int[][] table = metrics.getOverallGenotypeConcordance().getTable();
// set up the table // set up the table
@ -588,9 +582,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
@Test(enabled=true) @Test(enabled=true)
public void testRobustness() { public void testRobustness() {
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_1)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_1))));
VCFHeader disjointCompHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_2)))); VCFHeader disjointCompHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_2))));
VCFHeader overlapCompHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_3)))); VCFHeader overlapCompHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_3_HEADER_3))));
ConcordanceMetrics disjointMetrics = new ConcordanceMetrics(evalHeader,disjointCompHeader); ConcordanceMetrics disjointMetrics = new ConcordanceMetrics(evalHeader,disjointCompHeader);
ConcordanceMetrics overlapMetrics = new ConcordanceMetrics(evalHeader,overlapCompHeader); ConcordanceMetrics overlapMetrics = new ConcordanceMetrics(evalHeader,overlapCompHeader);
@ -720,8 +714,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
@Test(enabled = true) @Test(enabled = true)
public void testSites() { public void testSites() {
VCFCodec codec = new VCFCodec(); VCFCodec codec = new VCFCodec();
VCFHeader evalHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER)))); VCFHeader evalHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
VCFHeader compHeader = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER)))); VCFHeader compHeader = (VCFHeader)codec.readActualHeader(codec.makeSourceFromStream(new PositionalBufferedStream(new StringBufferInputStream(TEST_2_HEADER))));
ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader); ConcordanceMetrics metrics = new ConcordanceMetrics(evalHeader,compHeader);
List<Pair<VariantContext,VariantContext>> data = getData7(); List<Pair<VariantContext,VariantContext>> data = getData7();

View File

@ -28,17 +28,18 @@ package org.broadinstitute.sting.gatk.io.storage;
import net.sf.samtools.util.BlockCompressedOutputStream; import net.sf.samtools.util.BlockCompressedOutputStream;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.broad.tribble.AbstractFeatureReader; import org.broad.tribble.AbstractFeatureReader;
import org.broad.tribble.Feature;
import org.broad.tribble.FeatureCodec; import org.broad.tribble.FeatureCodec;
import org.broadinstitute.sting.gatk.io.stubs.VariantContextWriterStub; import org.broadinstitute.sting.gatk.io.stubs.VariantContextWriterStub;
import org.broadinstitute.sting.gatk.refdata.tracks.FeatureManager; import org.broadinstitute.sting.gatk.refdata.tracks.FeatureManager;
import org.broadinstitute.variant.bcf2.BCF2Utils;
import org.broadinstitute.variant.vcf.VCFHeader;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.variant.bcf2.BCF2Utils;
import org.broadinstitute.variant.variantcontext.VariantContext; import org.broadinstitute.variant.variantcontext.VariantContext;
import org.broadinstitute.variant.variantcontext.writer.Options; import org.broadinstitute.variant.variantcontext.writer.Options;
import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter; import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory; import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
import org.broadinstitute.variant.vcf.VCFHeader;
import java.io.*; import java.io.*;
import java.util.Arrays; import java.util.Arrays;
@ -205,12 +206,11 @@ public class VariantContextWriterStorage implements Storage<VariantContextWriter
if ( fd == null ) if ( fd == null )
throw new UserException.LocalParallelizationProblem(file); throw new UserException.LocalParallelizationProblem(file);
final FeatureCodec<VariantContext> codec = fd.getCodec(); final FeatureCodec codec = fd.getCodec();
final AbstractFeatureReader<VariantContext> source = final AbstractFeatureReader<Feature, ?> source = AbstractFeatureReader.getFeatureReader(file.getAbsolutePath(), codec, false);
AbstractFeatureReader.getFeatureReader(file.getAbsolutePath(), codec, false);
for ( final VariantContext vc : source.iterator() ) { for ( final Feature vc : source.iterator() ) {
target.writer.add(vc); target.writer.add((VariantContext) vc);
} }
source.close(); source.close();

View File

@ -51,9 +51,8 @@ package org.broadinstitute.sting.utils.codecs.beagle;
import org.broad.tribble.AsciiFeatureCodec; import org.broad.tribble.AsciiFeatureCodec;
import org.broad.tribble.Feature;
import org.broad.tribble.exception.CodecLineParsingException; import org.broad.tribble.exception.CodecLineParsingException;
import org.broad.tribble.readers.LineReader; import org.broad.tribble.readers.LineIterator;
import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec; import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec;
import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocParser;
@ -131,8 +130,8 @@ public class BeagleCodec extends AsciiFeatureCodec<BeagleFeature> implements Ref
this.genomeLocParser = genomeLocParser; this.genomeLocParser = genomeLocParser;
} }
public Object readHeader(LineReader reader) @Override
{ public Object readActualHeader(LineIterator reader) {
int[] lineCounter = new int[1]; int[] lineCounter = new int[1];
try { try {
header = readHeader(reader, lineCounter); header = readHeader(reader, lineCounter);
@ -181,14 +180,14 @@ public class BeagleCodec extends AsciiFeatureCodec<BeagleFeature> implements Ref
return header; return header;
} }
private static String[] readHeader(final LineReader source, int[] lineCounter) throws IOException { private static String[] readHeader(final LineIterator source, int[] lineCounter) throws IOException {
String[] header = null; String[] header = null;
int numLines = 0; int numLines = 0;
//find the 1st line that's non-empty and not a comment //find the 1st line that's non-empty and not a comment
String line; while(source.hasNext()) {
while( (line = source.readLine()) != null ) { final String line = source.next();
numLines++; numLines++;
if ( line.trim().isEmpty() ) { if ( line.trim().isEmpty() ) {
continue; continue;

View File

@ -28,9 +28,7 @@ package org.broadinstitute.sting.utils.codecs.hapmap;
import org.broad.tribble.AsciiFeatureCodec; import org.broad.tribble.AsciiFeatureCodec;
import org.broad.tribble.FeatureCodecHeader; import org.broad.tribble.FeatureCodecHeader;
import org.broad.tribble.annotation.Strand; import org.broad.tribble.annotation.Strand;
import org.broad.tribble.readers.AsciiLineReader; import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.readers.LineReader;
import org.broad.tribble.readers.PositionalBufferedStream;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
@ -112,18 +110,16 @@ public class RawHapMapCodec extends AsciiFeatureCodec<RawHapMapFeature> {
headerLine); headerLine);
} }
public Object readHeader(LineReader reader) { @Override
try { public Object readActualHeader(final LineIterator lineIterator) {
headerLine = reader.readLine(); this.headerLine = lineIterator.next();
} catch (IOException e) {
throw new IllegalArgumentException("Unable to read a line from the line reader");
}
return headerLine; return headerLine;
} }
@Override @Override
public FeatureCodecHeader readHeader(final PositionalBufferedStream stream) throws IOException { public FeatureCodecHeader readHeader(final LineIterator lineIterator) throws IOException {
final AsciiLineReader br = new AsciiLineReader(stream); final String header = (String) readActualHeader(lineIterator);
return new FeatureCodecHeader(readHeader(br), br.getPosition()); // TODO: This approach may cause issues with files formatted with \r\n-style line-endings.
return new FeatureCodecHeader(header, header.length() + 1);
} }
} }

View File

@ -28,6 +28,7 @@ package org.broadinstitute.sting.utils.codecs.refseq;
import org.broad.tribble.AsciiFeatureCodec; import org.broad.tribble.AsciiFeatureCodec;
import org.broad.tribble.Feature; import org.broad.tribble.Feature;
import org.broad.tribble.TribbleException; import org.broad.tribble.TribbleException;
import org.broad.tribble.readers.LineIterator;
import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec; import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec;
import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocParser;
@ -93,7 +94,8 @@ public class RefSeqCodec extends AsciiFeatureCodec<RefSeqFeature> implements Ref
} }
@Override @Override
public Feature decodeLoc(String line) { public Feature decodeLoc(final LineIterator lineIterator) {
final String line = lineIterator.next();
if (line.startsWith("#")) return null; if (line.startsWith("#")) return null;
String fields[] = line.split("\t"); String fields[] = line.split("\t");
if (fields.length < 3) throw new TribbleException("RefSeq (decodeLoc) : Unable to parse line -> " + line + ", we expected at least 3 columns, we saw " + fields.length); if (fields.length < 3) throw new TribbleException("RefSeq (decodeLoc) : Unable to parse line -> " + line + ", we expected at least 3 columns, we saw " + fields.length);
@ -160,4 +162,10 @@ public class RefSeqCodec extends AsciiFeatureCodec<RefSeqFeature> implements Ref
feature.setExon_frames(exon_frames); feature.setExon_frames(exon_frames);
return feature; return feature;
} }
@Override
public Object readActualHeader(LineIterator lineIterator) {
// No header for this format
return null;
}
} }

View File

@ -26,8 +26,8 @@
package org.broadinstitute.sting.utils.codecs.sampileup; package org.broadinstitute.sting.utils.codecs.sampileup;
import org.broad.tribble.AsciiFeatureCodec; import org.broad.tribble.AsciiFeatureCodec;
import org.broad.tribble.Feature;
import org.broad.tribble.exception.CodecLineParsingException; import org.broad.tribble.exception.CodecLineParsingException;
import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.util.ParsingUtils; import org.broad.tribble.util.ParsingUtils;
import java.util.ArrayList; import java.util.ArrayList;
@ -163,6 +163,12 @@ public class SAMPileupCodec extends AsciiFeatureCodec<SAMPileupFeature> {
return feature; return feature;
} }
@Override
public Object readActualHeader(LineIterator lineIterator) {
// No header for this format
return null;
}
private void parseIndels(String genotype,SAMPileupFeature feature) { private void parseIndels(String genotype,SAMPileupFeature feature) {
String [] obs = genotype.split("/"); // get observations, now need to tinker with them a bit String [] obs = genotype.split("/"); // get observations, now need to tinker with them a bit

View File

@ -29,8 +29,8 @@ import net.sf.samtools.Cigar;
import net.sf.samtools.TextCigarCodec; import net.sf.samtools.TextCigarCodec;
import net.sf.samtools.util.StringUtil; import net.sf.samtools.util.StringUtil;
import org.broad.tribble.AsciiFeatureCodec; import org.broad.tribble.AsciiFeatureCodec;
import org.broad.tribble.Feature;
import org.broad.tribble.exception.CodecLineParsingException; import org.broad.tribble.exception.CodecLineParsingException;
import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.util.ParsingUtils; import org.broad.tribble.util.ParsingUtils;
/** /**
@ -114,4 +114,10 @@ public class SAMReadCodec extends AsciiFeatureCodec<SAMReadFeature> {
bases, bases,
qualities); qualities);
} }
@Override
public Object readActualHeader(LineIterator lineIterator) {
// No header for this format
return null;
}
} }

View File

@ -26,14 +26,14 @@
package org.broadinstitute.sting.utils.codecs.table; package org.broadinstitute.sting.utils.codecs.table;
import org.broad.tribble.AsciiFeatureCodec; import org.broad.tribble.AsciiFeatureCodec;
import org.broad.tribble.readers.LineReader; import org.broad.tribble.readers.LineIterator;
import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec; import org.broadinstitute.sting.gatk.refdata.ReferenceDependentFeatureCodec;
import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.exceptions.UserException;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
/** /**
* Reads tab deliminated tabular text files * Reads tab deliminated tabular text files
@ -97,30 +97,29 @@ public class TableCodec extends AsciiFeatureCodec<TableFeature> implements Refer
String[] split = line.split(delimiterRegex); String[] split = line.split(delimiterRegex);
if (split.length < 1) if (split.length < 1)
throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format"); throw new IllegalArgumentException("TableCodec line = " + line + " doesn't appear to be a valid table format");
return new TableFeature(genomeLocParser.parseGenomeLoc(split[0]),Arrays.asList(split),header); return new TableFeature(genomeLocParser.parseGenomeLoc(split[0]),Arrays.asList(split), header);
} }
@Override @Override
public Object readHeader(LineReader reader) { public Object readActualHeader(final LineIterator reader) {
String line = ""; boolean isFirst = true;
try { while (reader.hasNext()) {
boolean isFirst = true; final String line = reader.peek(); // Peek to avoid reading non-header data
while ((line = reader.readLine()) != null) { if ( isFirst && ! line.startsWith(headerDelimiter) && ! line.startsWith(commentDelimiter)) {
if ( isFirst && ! line.startsWith(headerDelimiter) && ! line.startsWith(commentDelimiter)) { throw new UserException.MalformedFile("TableCodec file does not have a header");
throw new UserException.MalformedFile("TableCodec file does not have a header"); }
} isFirst &= line.startsWith(commentDelimiter);
isFirst &= line.startsWith(commentDelimiter); if (line.startsWith(headerDelimiter)) {
if (line.startsWith(headerDelimiter)) { reader.next(); // "Commit" the peek
if (header.size() > 0) throw new IllegalStateException("Input table file seems to have two header lines. The second is = " + line); if (header.size() > 0) throw new IllegalStateException("Input table file seems to have two header lines. The second is = " + line);
String spl[] = line.split(delimiterRegex); final String spl[] = line.split(delimiterRegex);
for (String s : spl) header.add(s); Collections.addAll(header, spl);
return header; return header;
} else if (!line.startsWith(commentDelimiter)) { } else if (line.startsWith(commentDelimiter)) {
break; reader.next(); // "Commit" the peek
} } else {
break;
} }
} catch (IOException e) {
throw new UserException.MalformedFile("unable to parse header from TableCodec file",e);
} }
return header; return header;
} }

View File

@ -28,6 +28,7 @@ package org.broadinstitute.sting.utils.variant;
import org.broad.tribble.Feature; import org.broad.tribble.Feature;
import org.broad.tribble.FeatureCodec; import org.broad.tribble.FeatureCodec;
import org.broad.tribble.FeatureCodecHeader; import org.broad.tribble.FeatureCodecHeader;
import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.readers.PositionalBufferedStream; import org.broad.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.sting.commandline.RodBinding; import org.broadinstitute.sting.commandline.RodBinding;
import org.broadinstitute.sting.gatk.CommandLineGATK; import org.broadinstitute.sting.gatk.CommandLineGATK;
@ -177,31 +178,26 @@ public class GATKVCFUtils {
/** /**
* Utility class to read all of the VC records from a file * Utility class to read all of the VC records from a file
* *
* @param source * @param file
* @param codec * @param codec
* @return * @return
* @throws IOException * @throws IOException
*/ */
public final static Pair<VCFHeader, VCIterable> readAllVCs( final File source, final FeatureCodec<VariantContext> codec ) throws IOException { public final static <SOURCE> Pair<VCFHeader, VCIterable<SOURCE>> readAllVCs( final File file, final FeatureCodec<VariantContext, SOURCE> codec) throws IOException {
// read in the features // read in the features
PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(source)); SOURCE source = codec.makeSourceFromStream(new FileInputStream(file));
FeatureCodecHeader header = codec.readHeader(pbs); FeatureCodecHeader header = codec.readHeader(source);
pbs.close();
pbs = new PositionalBufferedStream(new FileInputStream(source));
pbs.skip(header.getHeaderEnd());
final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue(); final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
return new Pair<VCFHeader, VCIterable>(vcfHeader, new VCIterable(pbs, codec, vcfHeader)); return new Pair<>(vcfHeader, new VCIterable<>(source, codec, vcfHeader));
} }
public static class VCIterable implements Iterable<VariantContext>, Iterator<VariantContext> { public static class VCIterable<SOURCE> implements Iterable<VariantContext>, Iterator<VariantContext> {
final PositionalBufferedStream pbs; final SOURCE source;
final FeatureCodec<VariantContext> codec; final FeatureCodec<VariantContext, SOURCE> codec;
final VCFHeader header; final VCFHeader header;
private VCIterable(final PositionalBufferedStream pbs, final FeatureCodec<VariantContext> codec, final VCFHeader header) { private VCIterable(final SOURCE source, final FeatureCodec<VariantContext, SOURCE> codec, final VCFHeader header) {
this.pbs = pbs; this.source = source;
this.codec = codec; this.codec = codec;
this.header = header; this.header = header;
} }
@ -213,17 +209,13 @@ public class GATKVCFUtils {
@Override @Override
public boolean hasNext() { public boolean hasNext() {
try { return ! codec.isDone(source);
return ! pbs.isDone();
} catch ( IOException e ) {
throw new RuntimeException(e);
}
} }
@Override @Override
public VariantContext next() { public VariantContext next() {
try { try {
final VariantContext vc = codec.decode(pbs); final VariantContext vc = codec.decode(source);
return vc == null ? null : vc.fullyDecode(header, false); return vc == null ? null : vc.fullyDecode(header, false);
} catch ( IOException e ) { } catch ( IOException e ) {
throw new RuntimeException(e); throw new RuntimeException(e);
@ -249,20 +241,19 @@ public class GATKVCFUtils {
final List<VariantContext> vcs = new ArrayList<VariantContext>(); final List<VariantContext> vcs = new ArrayList<VariantContext>();
final VCFCodec codec = new VCFCodec(); final VCFCodec codec = new VCFCodec();
PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(source)); PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(source));
FeatureCodecHeader header = codec.readHeader(pbs); final LineIterator vcfSource = codec.makeSourceFromStream(pbs);
pbs.close(); try {
final VCFHeader vcfHeader = (VCFHeader) codec.readActualHeader(vcfSource);
pbs = new PositionalBufferedStream(new FileInputStream(source)); while (vcfSource.hasNext()) {
pbs.skip(header.getHeaderEnd()); final VariantContext vc = codec.decode(vcfSource);
if ( vc != null )
vcs.add(vc);
}
final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue(); return new Pair<VCFHeader, List<VariantContext>>(vcfHeader, vcs);
} finally {
while ( ! pbs.isDone() ) { codec.close(vcfSource);
final VariantContext vc = codec.decode(pbs);
if ( vc != null )
vcs.add(vc);
} }
return new Pair<VCFHeader, List<VariantContext>>(vcfHeader, vcs);
} }
} }

View File

@ -30,6 +30,8 @@ import org.apache.log4j.Level;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout; import org.apache.log4j.PatternLayout;
import org.apache.log4j.spi.LoggingEvent; import org.apache.log4j.spi.LoggingEvent;
import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.sting.commandline.CommandLineUtils; import org.broadinstitute.sting.commandline.CommandLineUtils;
import org.broadinstitute.sting.utils.collections.Pair; import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.crypt.CryptUtils; import org.broadinstitute.sting.utils.crypt.CryptUtils;
@ -450,8 +452,8 @@ public abstract class BaseTest {
} }
public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException { public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
final Pair<VCFHeader, GATKVCFUtils.VCIterable> vcfData = GATKVCFUtils.readAllVCs(vcfFile, new VCFCodec()); final Pair<VCFHeader, GATKVCFUtils.VCIterable<LineIterator>> vcfData = GATKVCFUtils.readAllVCs(vcfFile, new VCFCodec());
final Pair<VCFHeader, GATKVCFUtils.VCIterable> bcfData = GATKVCFUtils.readAllVCs(bcfFile, new BCF2Codec()); final Pair<VCFHeader, GATKVCFUtils.VCIterable<PositionalBufferedStream>> bcfData = GATKVCFUtils.readAllVCs(bcfFile, new BCF2Codec());
assertVCFHeadersAreEqual(bcfData.getFirst(), vcfData.getFirst()); assertVCFHeadersAreEqual(bcfData.getFirst(), vcfData.getFirst());
assertVariantContextStreamsAreEqual(bcfData.getSecond(), vcfData.getSecond()); assertVariantContextStreamsAreEqual(bcfData.getSecond(), vcfData.getSecond());
} }

View File

@ -28,11 +28,9 @@ package org.broadinstitute.sting.gatk;
import net.sf.samtools.SAMFileReader; import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMReadGroupRecord; import net.sf.samtools.SAMReadGroupRecord;
import net.sf.samtools.SAMRecord; import net.sf.samtools.SAMRecord;
import org.broad.tribble.readers.AsciiLineReader;
import org.broadinstitute.sting.WalkerTest; import org.broadinstitute.sting.WalkerTest;
import org.broadinstitute.sting.commandline.Argument; import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.commandline.Output; import org.broadinstitute.sting.commandline.Output;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter; import org.broadinstitute.sting.gatk.filters.MappingQualityUnavailableFilter;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker; import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
@ -48,7 +46,6 @@ import org.broadinstitute.variant.vcf.VCFCodec;
import org.broadinstitute.variant.vcf.VCFHeader; import org.broadinstitute.variant.vcf.VCFHeader;
import org.broadinstitute.variant.vcf.VCFHeaderLine; import org.broadinstitute.variant.vcf.VCFHeaderLine;
import org.testng.Assert; import org.testng.Assert;
import org.testng.TestException;
import org.testng.annotations.DataProvider; import org.testng.annotations.DataProvider;
import org.testng.annotations.Test; import org.testng.annotations.Test;
@ -218,7 +215,8 @@ public class EngineFeaturesIntegrationTest extends WalkerTest {
1, Arrays.asList("")); 1, Arrays.asList(""));
spec.disableShadowBCF(); spec.disableShadowBCF();
final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0); final File vcf = executeTest("testGATKVersionInVCF", spec).first.get(0);
final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); final VCFCodec codec = new VCFCodec();
final VCFHeader header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new FileInputStream(vcf)));
final VCFHeaderLine versionLine = header.getMetaDataLine(GATKVCFUtils.GATK_COMMAND_LINE_KEY); final VCFHeaderLine versionLine = header.getMetaDataLine(GATKVCFUtils.GATK_COMMAND_LINE_KEY);
Assert.assertNotNull(versionLine); Assert.assertNotNull(versionLine);
Assert.assertTrue(versionLine.toString().contains("SelectVariants")); Assert.assertTrue(versionLine.toString().contains("SelectVariants"));
@ -232,7 +230,8 @@ public class EngineFeaturesIntegrationTest extends WalkerTest {
1, Arrays.asList("")); 1, Arrays.asList(""));
spec.disableShadowBCF(); spec.disableShadowBCF();
final File vcf = executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0); final File vcf = executeTest("testMultipleGATKVersionsInVCF", spec).first.get(0);
final VCFHeader header = (VCFHeader)new VCFCodec().readHeader(new AsciiLineReader(new FileInputStream(vcf))); final VCFCodec codec = new VCFCodec();
final VCFHeader header = (VCFHeader) codec.readActualHeader(codec.makeSourceFromStream(new FileInputStream(vcf)));
boolean foundHC = false; boolean foundHC = false;
boolean foundSV = false; boolean foundSV = false;

View File

@ -34,7 +34,7 @@ import java.io.IOException;
/** /**
* Feature reader with additional test utilities. The iterators can be checked to see if they are closed. * Feature reader with additional test utilities. The iterators can be checked to see if they are closed.
*/ */
public class TestFeatureReader extends TribbleIndexedFeatureReader<Feature> { public class TestFeatureReader extends TribbleIndexedFeatureReader<Feature, Object> {
public TestFeatureReader(String featurePath, FeatureCodec codec) throws IOException { public TestFeatureReader(String featurePath, FeatureCodec codec) throws IOException {
super(featurePath, codec, true); super(featurePath, codec, true);
} }

View File

@ -31,6 +31,7 @@ package org.broadinstitute.sting.utils.activeregion;
import net.sf.picard.reference.ReferenceSequenceFile; import net.sf.picard.reference.ReferenceSequenceFile;
import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.ArrayUtils;
import org.broad.tribble.readers.LineIterator;
import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocParser;
@ -48,7 +49,10 @@ import org.testng.annotations.Test;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.util.*; import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
public class BandPassActivityProfileUnitTest extends BaseTest { public class BandPassActivityProfileUnitTest extends BaseTest {
@ -261,7 +265,7 @@ public class BandPassActivityProfileUnitTest extends BaseTest {
final File file = new File(path); final File file = new File(path);
final VCFCodec codec = new VCFCodec(); final VCFCodec codec = new VCFCodec();
final Pair<VCFHeader, GATKVCFUtils.VCIterable> reader = GATKVCFUtils.readAllVCs(file, codec); final Pair<VCFHeader, GATKVCFUtils.VCIterable<LineIterator>> reader = GATKVCFUtils.readAllVCs(file, codec);
final List<ActiveRegion> incRegions = new ArrayList<ActiveRegion>(); final List<ActiveRegion> incRegions = new ArrayList<ActiveRegion>();
final BandPassActivityProfile incProfile = new BandPassActivityProfile(genomeLocParser, null); final BandPassActivityProfile incProfile = new BandPassActivityProfile(genomeLocParser, null);

View File

@ -26,7 +26,9 @@
package org.broadinstitute.sting.utils.codecs.hapmap; package org.broadinstitute.sting.utils.codecs.hapmap;
import org.broad.tribble.annotation.Strand; import org.broad.tribble.annotation.Strand;
import org.broad.tribble.readers.AsciiLineReader; import org.broad.tribble.readers.LineIterator;
import org.broad.tribble.readers.LineIteratorImpl;
import org.broad.tribble.readers.LineReaderUtil;
import org.broad.tribble.readers.PositionalBufferedStream; import org.broad.tribble.readers.PositionalBufferedStream;
import org.broadinstitute.sting.BaseTest; import org.broadinstitute.sting.BaseTest;
import org.testng.Assert; import org.testng.Assert;
@ -53,15 +55,13 @@ public class HapMapUnitTest extends BaseTest {
@Test @Test
public void testReadHeader() { public void testReadHeader() {
RawHapMapCodec codec = new RawHapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader(); final LineIterator reader = getLineIterator();
try { try {
String header = reader.readLine(); String header = reader.next();
reader.close(); Assert.assertTrue(header.equals(codec.readActualHeader(getLineIterator())));
Assert.assertTrue(header.equals(codec.readHeader(getReader()))); } finally {
} catch (IOException e) { codec.close(reader);
Assert.fail("Unable to read from file " + hapMapFile);
} }
reader.close();
} }
@Test @Test
@ -114,22 +114,20 @@ public class HapMapUnitTest extends BaseTest {
public void testReadCorrectNumberOfRecords() { public void testReadCorrectNumberOfRecords() {
// setup the record for reading our 500 line file (499 records, 1 header line) // setup the record for reading our 500 line file (499 records, 1 header line)
RawHapMapCodec codec = new RawHapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader(); final LineIterator reader = getLineIterator();
String line;
int count = 0; int count = 0;
try { try {
codec.readHeader(reader); codec.readHeader(reader);
line = reader.readLine(); while (reader.hasNext()) {
while (line != null) { codec.decode(reader.next());
codec.decode(line);
line = reader.readLine();
++count; ++count;
} }
} catch (IOException e) { } catch (IOException e) {
Assert.fail("IOException " + e.getMessage()); Assert.fail("IOException " + e.getMessage());
} finally {
codec.close(reader);
} }
reader.close();
Assert.assertEquals(count,499); Assert.assertEquals(count,499);
} }
@ -137,25 +135,26 @@ public class HapMapUnitTest extends BaseTest {
public void testGetSampleNames() { public void testGetSampleNames() {
// setup the record for reading our 500 line file (499 records, 1 header line) // setup the record for reading our 500 line file (499 records, 1 header line)
RawHapMapCodec codec = new RawHapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader(); final LineIterator reader = getLineIterator();
String line; String line;
try { try {
codec.readHeader(reader); codec.readHeader(reader);
line = reader.readLine(); line = reader.next();
RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line); RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line);
Assert.assertEquals(feature.getSampleIDs().length,87); Assert.assertEquals(feature.getSampleIDs().length,87);
} catch (IOException e) { } catch (IOException e) {
Assert.fail("IOException " + e.getMessage()); Assert.fail("IOException " + e.getMessage());
} finally {
codec.close(reader);
} }
reader.close();
} }
public AsciiLineReader getReader() { public LineIterator getLineIterator() {
try { try {
return new AsciiLineReader(new PositionalBufferedStream(new FileInputStream(hapMapFile))); return new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new PositionalBufferedStream(new FileInputStream(hapMapFile))));
} catch (FileNotFoundException e) { } catch (FileNotFoundException e) {
Assert.fail("Unable to open hapmap file : " + hapMapFile); Assert.fail("Unable to open hapmap file : " + hapMapFile);
} }

View File

@ -105,7 +105,7 @@ public class VariantContextBenchmark extends SimpleBenchmark {
public void run(T vc); public void run(T vc);
} }
private <T extends Feature> void runBenchmark(FeatureCodec<T> codec, FunctionToBenchmark<T> func) { private <T extends Feature> void runBenchmark(FeatureCodec codec, FunctionToBenchmark<T> func) {
// TODO -- update for new Tribble interface // TODO -- update for new Tribble interface
// try { // try {
// InputStream is = new ByteArrayInputStream(INPUT_STRING.getBytes()); // InputStream is = new ByteArrayInputStream(INPUT_STRING.getBytes());
@ -129,7 +129,7 @@ public class VariantContextBenchmark extends SimpleBenchmark {
public void timeV14(int rep) { public void timeV14(int rep) {
for ( int i = 0; i < rep; i++ ) { for ( int i = 0; i < rep; i++ ) {
FunctionToBenchmark<VariantContext> func = getV14FunctionToBenchmark(); FunctionToBenchmark<VariantContext> func = getV14FunctionToBenchmark();
FeatureCodec<VariantContext> codec = new VCFCodec(); final VCFCodec codec = new VCFCodec();
runBenchmark(codec, func); runBenchmark(codec, func);
} }
} }

View File

@ -31,14 +31,12 @@ import scala.io.Source._
import net.sf.samtools.SAMFileReader import net.sf.samtools.SAMFileReader
import org.broadinstitute.variant.vcf.{VCFHeader, VCFCodec} import org.broadinstitute.variant.vcf.{VCFHeader, VCFCodec}
import scala.collection.JavaConversions._ import scala.collection.JavaConversions._
import org.broad.tribble.{FeatureCodec, AbstractFeatureReader} import org.broad.tribble.AbstractFeatureReader
import org.broadinstitute.variant.variantcontext.VariantContext
object VCF_BAM_utilities { object VCF_BAM_utilities {
def getSamplesFromVCF(vcfFile: File): List[String] = { def getSamplesFromVCF(vcfFile: File): List[String] = {
val codec: FeatureCodec[VariantContext] = new VCFCodec().asInstanceOf[FeatureCodec[VariantContext]] AbstractFeatureReader.getFeatureReader(vcfFile.getPath, new VCFCodec()).getHeader.asInstanceOf[VCFHeader].getGenotypeSamples.toList
AbstractFeatureReader.getFeatureReader(vcfFile.getPath, codec).getHeader.asInstanceOf[VCFHeader].getGenotypeSamples.toList
} }
def getSamplesInBAM(bam: File): List[String] = { def getSamplesInBAM(bam: File): List[String] = {

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0"> <ivy-module version="1.0">
<info organisation="net.sf" module="picard" revision="1.96.1515" status="release" /> <info organisation="net.sf" module="picard" revision="1.96.1525" status="release" />
</ivy-module> </ivy-module>

View File

@ -1,3 +0,0 @@
<ivy-module version="1.0">
<info organisation="net.sf" module="sam" revision="1.96.1515" status="release" />
</ivy-module>

View File

@ -0,0 +1,3 @@
<ivy-module version="1.0">
<info organisation="net.sf" module="sam" revision="1.96.1525" status="release" />
</ivy-module>

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0"> <ivy-module version="1.0">
<info organisation="org.broad" module="tribble" revision="1.96.1515" status="integration" /> <info organisation="org.broad" module="tribble" revision="1.96.1526" status="integration" />
</ivy-module> </ivy-module>

View File

@ -1,3 +1,3 @@
<ivy-module version="1.0"> <ivy-module version="1.0">
<info organisation="org.broadinstitute" module="variant" revision="1.96.1515" status="integration" /> <info organisation="org.broadinstitute" module="variant" revision="1.96.1525" status="integration" />
</ivy-module> </ivy-module>