Parallelized CountCovariates! percent_ref_called_var now a standard genotype concordance module (for validation!). Really much smarter merging of headers for combineVariants. VCF codecs now actually look at the file version and blow up if they are the wrong versions. setHeaderVersion() in VCFHeaderLine.

git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@3802 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
depristo 2010-07-16 14:10:18 +00:00
parent f293eb7de1
commit b29eda83bb
13 changed files with 111 additions and 82 deletions

View File

@@ -72,6 +72,8 @@ public class VCFCodec implements FeatureCodec {
try {
while ((line = reader.readLine()) != null) {
if (line.startsWith("##")) {
if ( line.startsWith("##fileformat") && ! line.startsWith("##fileformat=VCFv3" ) )
throw new CodecLineParsingException("VCF codec can only parse VCF3 formated files. Your version line is " + line + ". If you want to parse VCF4, use VCF4 use VCF as the rod type");
headerStrings.add(line);
}
else if (line.startsWith("#")) {

View File

@@ -42,6 +42,7 @@ public class VCFHeaderLine implements Comparable {
private String stringRep = null;
private String mKey = null;
private String mValue = null;
protected VCFHeaderVersion mVersion = null;
/**
@@ -135,6 +136,10 @@ public class VCFHeaderLine implements Comparable {
this.mVersion = version;
}
public VCFHeaderVersion getVersion() {
return mVersion;
}
/**
* create a string of a mapping pair for the target VCF version
* @param keyValues a mapping of the key->value pairs to output

View File

@@ -80,6 +80,8 @@ public class VCF4Codec implements FeatureCodec, NameAwareCodec {
while ((line = reader.readLine()) != null) {
lineNo++;
if (line.startsWith("##")) {
if ( line.startsWith("##fileformat") && ! line.startsWith("##fileformat=VCFv4" ) )
throw new CodecLineParsingException("VCF4 codec can only parse VCF4 formated files. Your version line is " + line + ". If you want VCF3 parsing, use VCF as the rod type.");
headerStrings.add(line);
}
else if (line.startsWith("#")) {

View File

@@ -70,7 +70,7 @@ import java.util.Map;
@WalkerName( "CountCovariates" )
@ReadFilters( {ZeroMappingQualityReadFilter.class} ) // Filter out all reads with zero mapping quality
@Requires( {DataSource.READS, DataSource.REFERENCE, DataSource.REFERENCE_BASES} ) // This walker requires both -I input.bam and -R reference.fasta
public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> implements TreeReducible<PrintStream> {
/////////////////////////////
// Constants
@@ -87,6 +87,9 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
/////////////////////////////
// Command Line Arguments
/////////////////////////////
@Argument(fullName="recal_file", shortName="recalFile", required=true, doc="Filename for the outputted covariates table recalibration file")
public PrintStream RECAL_FILE;
@Argument(fullName="list", shortName="ls", doc="List the available covariates and exit", required=false)
private boolean LIST_ONLY = false;
@Argument(fullName="covariate", shortName="cov", doc="Covariates to be used in the recalibration. Each covariate is given as a separate cov parameter. ReadGroup and ReportedQuality are required covariates and are already added for you.", required=false)
@@ -230,6 +233,12 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
logger.info( "\t" + cov.getClass().getSimpleName() );
cov.initialize( RAC ); // Initialize any covariate member variables using the shared argument collection
}
// try {
// stream = new PrintStream( RAC.RECAL_FILE );
// } catch ( FileNotFoundException e ) {
// throw new RuntimeException( "Couldn't open output file: ", e );
// }
}
@@ -263,15 +272,10 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
if( !isSNP && ( ++numUnprocessed >= PROCESS_EVERY_NTH_LOCUS ) ) {
numUnprocessed = 0; // Reset the counter because we are processing this very locus
GATKSAMRecord gatkRead;
int offset;
byte refBase;
byte[] bases;
// For each read at this locus
for( PileupElement p : context.getBasePileup() ) {
gatkRead = (GATKSAMRecord) p.getRead();
offset = p.getOffset();
GATKSAMRecord gatkRead = (GATKSAMRecord) p.getRead();
int offset = p.getOffset();
if( gatkRead.containsTemporaryAttribute( SKIP_RECORD_ATTRIBUTE ) ) {
continue;
@@ -297,8 +301,8 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
// Skip this position if base quality is zero
if( gatkRead.getBaseQualities()[offset] > 0 ) {
bases = gatkRead.getReadBases();
refBase = ref.getBase();
byte[] bases = gatkRead.getReadBases();
byte refBase = ref.getBase();
// Skip if this base is an 'N' or etc.
if( BaseUtils.isRegularBase( bases[offset] ) ) {
@@ -390,8 +394,6 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
* @param refBase The reference base at this locus
*/
private void updateDataFromRead(final GATKSAMRecord gatkRead, final int offset, final byte refBase) {
final Object[][] covars = (Comparable[][]) gatkRead.getTemporaryAttribute(COVARS_ATTRIBUTE);
final Object[] key = covars[offset];
@@ -426,11 +428,7 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
* @return returns A PrintStream created from the -recalFile filename argument specified to the walker
*/
public PrintStream reduceInit() {
try {
return new PrintStream( RAC.RECAL_FILE );
} catch ( FileNotFoundException e ) {
throw new RuntimeException( "Couldn't open output file: ", e );
}
return RECAL_FILE;
}
/**
@@ -440,7 +438,11 @@ public class CovariateCounterWalker extends LocusWalker<Integer, PrintStream> {
* @return returns The PrintStream used to output the CSV data
*/
public PrintStream reduce( Integer value, PrintStream recalTableStream ) {
return recalTableStream; // Nothing to do here
return recalTableStream; // Nothing to do here, just return our open stream
}
public PrintStream treeReduce( PrintStream recalTableStream1, PrintStream recalTableStream2 ) {
return recalTableStream1; // Nothing to do here, just return our open stream
}
/**

View File

@@ -71,22 +71,22 @@ public class RecalDatumOptimized {
//
//---------------------------------------------------------------------------------------------------------------
public final void increment( final long incObservations, final long incMismatches ) {
public synchronized final void increment( final long incObservations, final long incMismatches ) {
numObservations += incObservations;
numMismatches += incMismatches;
}
public final void increment( final RecalDatumOptimized other ) {
public synchronized final void increment( final RecalDatumOptimized other ) {
increment( other.numObservations, other.numMismatches );
}
public final void increment( final List<RecalDatumOptimized> data ) {
public synchronized final void increment( final List<RecalDatumOptimized> data ) {
for ( RecalDatumOptimized other : data ) {
this.increment( other );
}
}
public final void increment( final char curBase, final char refBase ) {
public synchronized final void increment( final char curBase, final char refBase ) {
increment( 1, BaseUtils.simpleBaseToBaseIndex(curBase) == BaseUtils.simpleBaseToBaseIndex(refBase) ? 0 : 1 ); // increment takes num observations, then num mismatches
}

View File

@@ -41,8 +41,6 @@ public class RecalibrationArgumentCollection {
//////////////////////////////////
// Shared Command Line Arguments
//////////////////////////////////
@Argument(fullName="recal_file", shortName="recalFile", required=false, doc="Filename for the outputted covariates table recalibration file")
public String RECAL_FILE = "output.recal_data.csv";
@Argument(fullName="default_read_group", shortName="dRG", required=false, doc="If a read has no read group then default to the provided String.")
public String DEFAULT_READ_GROUP = null;
@Argument(fullName="default_platform", shortName="dP", required=false, doc="If a read has no platform then default to the provided String. Valid options are illumina, 454, and solid.")

View File

@@ -87,6 +87,9 @@ public class TableRecalibrationWalker extends ReadWalker<SAMRecord, SAMFileWrite
/////////////////////////////
@ArgumentCollection private RecalibrationArgumentCollection RAC = new RecalibrationArgumentCollection();
@Argument(fullName="recal_file", shortName="recalFile", required=false, doc="Filename for the outputted covariates table recalibration file")
public String RECAL_FILE = "output.recal_data.csv";
/////////////////////////////
// Command Line Arguments
/////////////////////////////
@@ -168,7 +171,7 @@ public class TableRecalibrationWalker extends ReadWalker<SAMRecord, SAMFileWrite
boolean sawEOF = false;
try {
for ( String line : new XReadLines(new File( RAC.RECAL_FILE )) ) {
for ( String line : new XReadLines(new File( RECAL_FILE )) ) {
lineNumber++;
if ( EOF_MARKER.equals(line) ) {
sawEOF = true;
@@ -178,7 +181,7 @@ public class TableRecalibrationWalker extends ReadWalker<SAMRecord, SAMFileWrite
// Read in the covariates that were used from the input file
else if( COVARIATE_PATTERN.matcher(line).matches() ) { // The line string is either specifying a covariate or is giving csv data
if( foundAllCovariates ) {
throw new StingException( "Malformed input recalibration file. Found covariate names intermingled with data in file: " + RAC.RECAL_FILE );
throw new StingException( "Malformed input recalibration file. Found covariate names intermingled with data in file: " + RECAL_FILE );
} else { // Found the covariate list in input file, loop through all of them and instantiate them
String[] vals = line.split(",");
for( int iii = 0; iii < vals.length - 3; iii++ ) { // There are n-3 covariates. The last three items are nObservations, nMismatch, and Qempirical
@@ -210,7 +213,7 @@ public class TableRecalibrationWalker extends ReadWalker<SAMRecord, SAMFileWrite
// At this point all the covariates should have been found and initialized
if( requestedCovariates.size() < 2 ) {
throw new StingException( "Malformed input recalibration csv file. Covariate names can't be found in file: " + RAC.RECAL_FILE );
throw new StingException( "Malformed input recalibration csv file. Covariate names can't be found in file: " + RECAL_FILE );
}
final boolean createCollapsedTables = true;
@@ -228,7 +231,7 @@ public class TableRecalibrationWalker extends ReadWalker<SAMRecord, SAMFileWrite
}
} catch ( FileNotFoundException e ) {
throw new StingException("Can not find input file: " + RAC.RECAL_FILE);
throw new StingException("Can not find input file: " + RECAL_FILE);
} catch ( NumberFormatException e ) {
throw new StingException("Error parsing recalibration data at line " + lineNumber + ". Perhaps your table was generated by an older version of CovariateCounterWalker.");
}

View File

@@ -431,6 +431,7 @@ class SampleStats implements TableType {
class SampleSummaryStats implements TableType {
private final static String ALL_SAMPLES_KEY = "allSamples";
private final static String[] COLUMN_KEYS = new String[]{
"percent_comp_ref_called_var",
"percent_comp_het_called_het",
"percent_comp_het_called_var",
"percent_comp_hom_called_hom",
@@ -532,45 +533,50 @@ class SampleSummaryStats implements TableType {
long numer, denom;
// Summary 0: % het called as het
numer = stats[Genotype.Type.HET.ordinal()][Genotype.Type.HET.ordinal()];
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HET), allGenotypes);
// Summary 0: % ref called as var
numer = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_REF), allVariantGenotypes);
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_REF), allGenotypes);
updateSummaries(0, summary, numer, denom);
// Summary 1: % het called as var
numer = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HET), allVariantGenotypes);
// Summary 1: % het called as het
numer = stats[Genotype.Type.HET.ordinal()][Genotype.Type.HET.ordinal()];
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HET), allGenotypes);
updateSummaries(1, summary, numer, denom);
// Summary 2: % homVar called as homVar
numer = stats[Genotype.Type.HOM_VAR.ordinal()][Genotype.Type.HOM_VAR.ordinal()];
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_VAR), allGenotypes);
// Summary 2: % het called as var
numer = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HET), allVariantGenotypes);
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HET), allGenotypes);
updateSummaries(2, summary, numer, denom);
// Summary 3: % homVars called as var
numer = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_VAR), allVariantGenotypes);
// Summary 3: % homVar called as homVar
numer = stats[Genotype.Type.HOM_VAR.ordinal()][Genotype.Type.HOM_VAR.ordinal()];
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_VAR), allGenotypes);
updateSummaries(3, summary, numer, denom);
// Summary 4: % non-ref called as non-ref
// Summary 4: % homVars called as var
numer = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_VAR), allVariantGenotypes);
denom = sumStatsAllPairs(stats, EnumSet.of(Genotype.Type.HOM_VAR), allGenotypes);
updateSummaries(4, summary, numer, denom);
// Summary 5: % non-ref called as non-ref
// MAD: this is known as the non-reference sensitivity (# non-ref according to comp found in eval / # non-ref in comp)
numer = sumStatsAllPairs(stats, allVariantGenotypes, allVariantGenotypes);
denom = sumStatsAllPairs(stats, allVariantGenotypes, allGenotypes);
updateSummaries(4, summary, numer, denom);
updateSummaries(5, summary, numer, denom);
// Summary 5: overall genotype concordance of sites called in eval track
// Summary 6: overall genotype concordance of sites called in eval track
// MAD: this is the tradition genotype concordance
numer = sumStatsDiag(stats, allCalledGenotypes);
denom = sumStatsAllPairs(stats, allCalledGenotypes, allCalledGenotypes);
updateSummaries(5, summary, numer, denom);
updateSummaries(6, summary, numer, denom);
// Summary 6: overall genotype concordance of sites called non-ref in eval track
// Summary 7: overall genotype concordance of sites called non-ref in eval track
long homrefConcords = stats[Genotype.Type.HOM_REF.ordinal()][Genotype.Type.HOM_REF.ordinal()];
long diag = sumStatsDiag(stats, allVariantGenotypes);
long allNoHomRef = sumStatsAllPairs(stats, allCalledGenotypes, allCalledGenotypes) - homrefConcords;
numer = allNoHomRef - diag;
denom = allNoHomRef;
updateSummaries(6, summary, numer, denom);
updateSummaries(7, summary, numer, denom);
}
// update the final summary stats

View File

@@ -52,7 +52,7 @@ public class NestedHashMap{
return null;
}
public void put( final Object value, final Object... keys ) {
public synchronized void put( final Object value, final Object... keys ) {
Map map = this.data;
final int keysLength = keys.length;

View File

@@ -133,12 +133,13 @@ public class VCFUtils {
//System.out.printf("Merging in header %s%n", source);
for ( VCFHeaderLine line : source.getMetaData()) {
String key = line.getKey();
if ( line instanceof VCFNamedHeaderLine )
key = key + "." + ((VCFNamedHeaderLine) line).getName();
if ( map.containsKey(key) ) {
VCFHeaderLine other = map.get(key);
if ( line.equals(other) )
if ( line.equals(other) || line.getVersion() != VCFHeaderVersion.VCF4_0 ) // todo -- remove me when everything is 4
continue;
else if ( ! line.getClass().equals(other.getClass()) )
throw new IllegalStateException("Incompatible header types: " + line + " " + other );
@@ -172,7 +173,7 @@ public class VCFUtils {
if ( logger != null ) logger.warn(String.format("Ignoring header line already in map: this header line = " + line + " already present header = " + other));
}
} else {
line.setVersion(VCFHeaderVersion.VCF4_0);
line.setVersion(VCFHeaderVersion.VCF4_0); // todo -- remove this when we finally have vcf3/4 unified headers
map.put(key, line);
//System.out.printf("Adding header line %s%n", line);
}

View File

@@ -22,28 +22,30 @@ public class RecalibrationWalkersIntegrationTest extends WalkerTest {
e.put( validationDataLocation + "NA12873.454.SRP000031.2009_06.chr1.10_20mb.bam", "596a9ec9cbc1da70481e45a5a588a41d" );
e.put( validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.allTechs.bam", "507dbd3ba6f54e066d04c4d24f59c3ab" );
for ( Map.Entry<String, String> entry : e.entrySet() ) {
String bam = entry.getKey();
String md5 = entry.getValue();
for ( String parallelism : Arrays.asList("") ) { // todo -- enable parallel tests. They work but there's a system bug Arrays.asList("", " -nt 4")) {
for ( Map.Entry<String, String> entry : e.entrySet() ) {
String bam = entry.getKey();
String md5 = entry.getValue();
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-R " + oneKGLocation + "reference/human_b36_both.fasta" +
" --DBSNP /humgen/gsa-scr1/GATK_Data/dbsnp_129_b36.rod" +
" -T CountCovariates" +
" -I " + bam +
( bam.equals( validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.allTechs.bam" )
? " -L 1:10,800,000-10,810,000" : " -L 1:10,000,000-10,200,000" ) +
" -cov ReadGroupCovariate" +
" -cov QualityScoreCovariate" +
" -cov CycleCovariate" +
" -cov DinucCovariate" +
" -cov TileCovariate" +
" --solid_recal_mode SET_Q_ZERO" +
" -recalFile %s",
1, // just one output file
Arrays.asList(md5));
List<File> result = executeTest("testCountCovariates1", spec).getFirst();
paramsFiles.put(bam, result.get(0).getAbsolutePath());
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-R " + oneKGLocation + "reference/human_b36_both.fasta" +
" --DBSNP /humgen/gsa-scr1/GATK_Data/dbsnp_129_b36.rod" +
" -T CountCovariates" +
" -I " + bam +
( bam.equals( validationDataLocation + "NA12878.1kg.p2.chr1_10mb_11_mb.allTechs.bam" )
? " -L 1:10,800,000-10,810,000" : " -L 1:10,000,000-10,200,000" ) +
" -cov ReadGroupCovariate" +
" -cov QualityScoreCovariate" +
" -cov CycleCovariate" +
" -cov DinucCovariate" +
" -cov TileCovariate" +
" --solid_recal_mode SET_Q_ZERO" +
" -recalFile %s" + parallelism,
1, // just one output file
Arrays.asList(md5));
List<File> result = executeTest("testCountCovariates1" + parallelism, spec).getFirst();
paramsFiles.put(bam, result.get(0).getAbsolutePath());
}
}
}

View File

@@ -8,8 +8,7 @@ import java.util.ArrayList;
public class RecalibrationWalkersPerformanceTest extends WalkerTest {
@Test
public void testCountCovariatesWholeGenome() {
private void testCountCovariatesWholeGenomeRunner(String moreArgs) {
WalkerTestSpec spec = new WalkerTestSpec(
"-R " + seqLocation + "references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta" +
" -T CountCovariates" +
@@ -17,14 +16,13 @@ public class RecalibrationWalkersPerformanceTest extends WalkerTest {
" -L chr1:1-50,000,000" +
" -standard" +
" -OQ" +
" -recalFile /dev/null",
" -recalFile /dev/null" + moreArgs,
0,
new ArrayList<String>(0));
executeTest("testCountCovariatesWholeGenome", spec);
}
@Test
public void testCountCovariatesWholeExome() {
private void testCountCovariatesWholeExomeRunner(String moreArgs) {
WalkerTestSpec spec = new WalkerTestSpec(
"-R " + seqLocation + "references/Homo_sapiens_assembly18/v0/Homo_sapiens_assembly18.fasta" +
" -T CountCovariates" +
@@ -32,12 +30,22 @@ public class RecalibrationWalkersPerformanceTest extends WalkerTest {
" -L " + evaluationDataLocation + "whole_exome_agilent_designed_120.targets.chr1.interval_list" +
" -standard" +
" -OQ" +
" -recalFile /dev/null",
" -recalFile /dev/null" + moreArgs,
0,
new ArrayList<String>(0));
executeTest("testCountCovariatesWholeExome", spec);
}
@Test
public void testCountCovariatesWholeGenome() { testCountCovariatesWholeGenomeRunner(""); }
@Test
public void testCountCovariatesWholeGenomeParallel() { testCountCovariatesWholeGenomeRunner(" -nt 4"); }
@Test
public void testCountCovariatesWholeExome() { testCountCovariatesWholeExomeRunner(""); }
@Test
public void testCountCovariatesWholeExomeParallel() { testCountCovariatesWholeExomeRunner(" -nt 4"); }
@Test
public void testTableRecalibratorWholeGenome() {
WalkerTestSpec spec = new WalkerTestSpec(

View File

@@ -29,7 +29,7 @@ public class
String extraArgs = "-L 1:1-10,000,000";
for (String tests : testsEnumerations) {
WalkerTestSpec spec = new WalkerTestSpec(withSelect(tests, "DP < 50", "DP50") + " " + extraArgs + " -o %s",
1, Arrays.asList("97d2471ed6ee79d70ce5bd9cc0be2239"));
1, Arrays.asList("119601d7e9e67a1053663b2e546250ed"));
executeTest("testSelect1", spec);
}
}
@@ -38,7 +38,7 @@ public class
public void testSelect2() {
String extraArgs = "-L 1:1-10,000,000";
WalkerTestSpec spec = new WalkerTestSpec( withSelect(withSelect(root, "DP < 50", "DP50"), "set==\"Intersection\"", "intersection") + " " + extraArgs + " -o %s",
1, Arrays.asList("e3366d73ee7cdf630c29809ce230e32e"));
1, Arrays.asList("06d495ab8169a2570eebdc54ecdffe10"));
executeTest("testSelect2", spec);
}
@@ -48,7 +48,7 @@ public class
for (String vcfFile : vcfFiles) {
WalkerTestSpec spec = new WalkerTestSpec(cmdRoot + " -B eval,VCF," + validationDataLocation + vcfFile + " -B comp,VCF," + validationDataLocation + "GenotypeConcordanceComp.vcf -E GenotypeConcordance -reportType CSV -o %s",
1,
Arrays.asList("51574b4ab0b381c5a01268f91e78b25c"));
Arrays.asList("15d1075d384da2bb7445f7493f2b6a07"));
executeTest("testVEGenotypeConcordance" + vcfFile, spec);
}
@@ -57,8 +57,8 @@ public class
@Test
public void testVESimple() {
HashMap<String, String> expectations = new HashMap<String, String>();
expectations.put("-L 1:1-10,000,000", "8f76d8c0e3a8a5836bb5bf423e04c268");
expectations.put("-L 1:1-10,000,000 -family NA19238+NA19239=NA19240 -MVQ 0", "6b128bacbd0402471bd6d4e3f9283c47");
expectations.put("-L 1:1-10,000,000", "629b8b124306435ff56b66357354dfbc");
expectations.put("-L 1:1-10,000,000 -family NA19238+NA19239=NA19240 -MVQ 0", "f51c299d500b347d098c7ab25f54a436");
for ( Map.Entry<String, String> entry : expectations.entrySet() ) {
String extraArgs = entry.getKey();
@@ -80,10 +80,10 @@ public class
" -B comp_hapmap,VCF," + validationDataLocation + "CEU_hapmap_nogt_23.vcf";
String matchingMD5 = "aca5a64a0e4850906db2bd820253b784";
String matchingMD5 = "d01725ce4e46c8fea0855a923c1598fd";
expectations.put("", matchingMD5);
expectations.put(" -known comp_hapmap -known dbsnp", matchingMD5);
expectations.put(" -known comp_hapmap", "442213609c2866f7a90cbc4b3486441a");
expectations.put(" -known comp_hapmap", "a50be9240f6c90503fb6333d8a78b974");
for (String tests : testsEnumerations) {
for (Map.Entry<String, String> entry : expectations.entrySet()) {
String extraArgs2 = entry.getKey();
@@ -118,7 +118,7 @@ public class
for (String tests : testsEnumerations) {
WalkerTestSpec spec = new WalkerTestSpec(tests + " " + extraArgs + " -o %s -outputVCF %s",
2,
Arrays.asList("dc53aaf7db9f05e3b0a38bf5efe3fbbe", "d94328f4a5f7c40e95edf2ef13f38ae0"));
Arrays.asList("483f821ce96f4cf571e9bba356c9f325", "d94328f4a5f7c40e95edf2ef13f38ae0"));
executeTest("testVEWriteVCF", spec);
}
}