Merge branch 'master' of ssh://nickel.broadinstitute.org/humgen/gsa-scr1/gsa-engineering/git/unstable

This commit is contained in:
Eric Banks 2012-02-27 11:31:41 -05:00
commit 64754e7870
15 changed files with 230 additions and 53 deletions

View File

@ -175,8 +175,8 @@ public class LocusIteratorByState extends LocusIterator {
return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement);
}
public CigarOperator peekForwardOnGenome() {
return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement ).getOperator();
public CigarElement peekForwardOnGenome() {
return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement );
}
public CigarOperator stepForwardOnGenome() {
@ -462,15 +462,19 @@ public class LocusIteratorByState extends LocusIterator {
final SAMRecordState state = iterator.next(); // state object with the read/offset information
final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read
final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator
final CigarOperator nextOp = state.peekForwardOnGenome(); // next cigar operator
final CigarElement nextElement = state.peekForwardOnGenome(); // next cigar element
final CigarOperator nextOp = nextElement.getOperator();
final int readOffset = state.getReadOffset(); // the base offset on this read
int nextElementLength = nextElement.getLength();
if (op == CigarOperator.N) // N's are never added to any pileup
continue;
if (op == CigarOperator.D) {
if (readInfo.includeReadsWithDeletionAtLoci()) { // only add deletions to the pileup if we are authorized to do so
pile.add(new PileupElement(read, readOffset, true, nextOp == CigarOperator.D, nextOp == CigarOperator.I, nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart())));
pile.add(new PileupElement(read, readOffset, true, nextOp == CigarOperator.D, nextOp == CigarOperator.I, nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart()),
null,nextOp == CigarOperator.D? nextElementLength:-1));
size++;
nDeletions++;
if (read.getMappingQuality() == 0)
@ -479,7 +483,12 @@ public class LocusIteratorByState extends LocusIterator {
}
else {
if (!filterBaseInRead(read, location.getStart())) {
pile.add(new PileupElement(read, readOffset, false, nextOp == CigarOperator.D, nextOp == CigarOperator.I, nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart())));
String insertedBaseString = null;
if (nextOp == CigarOperator.I) {
insertedBaseString = new String(Arrays.copyOfRange(read.getReadBases(), readOffset + 1, readOffset + 1 + nextElement.getLength()));
}
pile.add(new PileupElement(read, readOffset, false, nextOp == CigarOperator.D, nextOp == CigarOperator.I, nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart()),
insertedBaseString,nextElementLength));
size++;
if (read.getMappingQuality() == 0)
nMQ0Reads++;

View File

@ -26,6 +26,7 @@
package org.broadinstitute.sting.gatk.walkers.coverage;
import net.sf.samtools.SAMReadGroupRecord;
import org.broadinstitute.sting.commandline.Advanced;
import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.commandline.Output;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
@ -119,21 +120,6 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<DoCOutputType.Partiti
@Multiplex(value=DoCOutputMultiplexer.class,arguments={"partitionTypes","refSeqGeneList","omitDepthOutput","omitIntervals","omitSampleSummary","omitLocusTable"})
Map<DoCOutputType,PrintStream> out;
/**
* Sets the low-coverage cutoff for granular binning. All loci with depth < START are counted in the first bin.
*/
@Argument(fullName = "start", doc = "Starting (left endpoint) for granular binning", required = false)
int start = 1;
/**
* Sets the high-coverage cutoff for granular binning. All loci with depth > END are counted in the last bin.
*/
@Argument(fullName = "stop", doc = "Ending (right endpoint) for granular binning", required = false)
int stop = 500;
/**
* Sets the number of bins for granular binning
*/
@Argument(fullName = "nBins", doc = "Number of bins to use for granular binning", required = false)
int nBins = 499;
@Argument(fullName = "minMappingQuality", shortName = "mmq", doc = "Minimum mapping quality of reads to count towards depth. Defaults to -1.", required = false)
int minMappingQuality = -1;
@Argument(fullName = "maxMappingQuality", doc = "Maximum mapping quality of reads to count towards depth. Defaults to 2^31-1 (Integer.MAX_VALUE).", required = false)
@ -142,16 +128,19 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<DoCOutputType.Partiti
byte minBaseQuality = -1;
@Argument(fullName = "maxBaseQuality", doc = "Maximum quality of bases to count towards depth. Defaults to 127 (Byte.MAX_VALUE).", required = false)
byte maxBaseQuality = Byte.MAX_VALUE;
/**
* Instead of reporting depth, report the base pileup at each locus
*/
@Argument(fullName = "printBaseCounts", shortName = "baseCounts", doc = "Will add base counts to per-locus output.", required = false)
boolean printBaseCounts = false;
/**
* Do not tabulate locus statistics (# loci covered by sample by coverage)
*/
@Argument(fullName = "omitLocusTable", shortName = "omitLocusTable", doc = "Will not calculate the per-sample per-depth counts of loci, which should result in speedup", required = false)
boolean omitLocusTable = false;
/**
* Do not tabulate interval statistics (mean, median, quartiles AND # intervals by sample by coverage)
*/
@ -162,8 +151,52 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<DoCOutputType.Partiti
*/
@Argument(fullName = "omitDepthOutputAtEachBase", shortName = "omitBaseOutput", doc = "Will omit the output of the depth of coverage at each base, which should result in speedup", required = false)
boolean omitDepthOutput = false;
/**
* Path to the RefSeq file for use in aggregating coverage statistics over genes
*/
@Argument(fullName = "calculateCoverageOverGenes", shortName = "geneList", doc = "Calculate the coverage statistics over this list of genes. Currently accepts RefSeq.", required = false)
File refSeqGeneList = null;
/**
* The format of the output file
*/
@Argument(fullName = "outputFormat", doc = "the format of the output file (e.g. csv, table, rtable); defaults to r-readable table", required = false)
String outputFormat = "rtable";
// ---------------------------------------------------------------------------
//
// Advanced arguments
//
// ---------------------------------------------------------------------------
@Advanced
@Argument(fullName = "includeRefNSites", doc = "If provided, sites with reference N bases but with coverage from neighboring reads will be included in DoC calculations.", required = false)
boolean includeRefNBases = false;
@Advanced
@Argument(fullName = "printBinEndpointsAndExit", doc = "Prints the bin values and exits immediately. Use to calibrate what bins you want before running on data.", required = false)
boolean printBinEndpointsAndExit = false;
/**
* Sets the low-coverage cutoff for granular binning. All loci with depth < START are counted in the first bin.
*/
@Advanced
@Argument(fullName = "start", doc = "Starting (left endpoint) for granular binning", required = false)
int start = 1;
/**
* Sets the high-coverage cutoff for granular binning. All loci with depth > END are counted in the last bin.
*/
@Advanced
@Argument(fullName = "stop", doc = "Ending (right endpoint) for granular binning", required = false)
int stop = 500;
/**
* Sets the number of bins for granular binning
*/
@Advanced
@Argument(fullName = "nBins", doc = "Number of bins to use for granular binning", required = false)
int nBins = 499;
/**
* Do not tabulate the sample summary statistics (total, mean, median, quartile coverage per sample)
*/
@ -174,27 +207,22 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<DoCOutputType.Partiti
*/
@Argument(fullName = "partitionType", shortName = "pt", doc = "Partition type for depth of coverage. Defaults to sample. Can be any combination of sample, readgroup, library.", required = false)
Set<DoCOutputType.Partition> partitionTypes = EnumSet.of(DoCOutputType.Partition.sample);
/**
* Consider a spanning deletion as contributing to coverage. Also enables deletion counts in per-base output.
*/
@Advanced
@Argument(fullName = "includeDeletions", shortName = "dels", doc = "Include information on deletions", required = false)
boolean includeDeletions = false;
@Advanced
@Argument(fullName = "ignoreDeletionSites", doc = "Ignore sites consisting only of deletions", required = false)
boolean ignoreDeletionSites = false;
/**
* Path to the RefSeq file for use in aggregating coverage statistics over genes
*/
@Argument(fullName = "calculateCoverageOverGenes", shortName = "geneList", doc = "Calculate the coverage statistics over this list of genes. Currently accepts RefSeq.", required = false)
File refSeqGeneList = null;
/**
* The format of the output file
*/
@Argument(fullName = "outputFormat", doc = "the format of the output file (e.g. csv, table, rtable); defaults to r-readable table", required = false)
String outputFormat = "rtable";
/**
* A coverage threshold for summarizing (e.g. % bases >= CT for each sample)
*/
@Advanced
@Argument(fullName = "summaryCoverageThreshold", shortName = "ct", doc = "for summary file outputs, report the % of bases coverd to >= this number. Defaults to 15; can take multiple arguments.", required = false)
int[] coverageThresholds = {15};
@ -334,24 +362,29 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<DoCOutputType.Partiti
}
public Map<DoCOutputType.Partition,Map<String,int[]>> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if (includeRefNBases || BaseUtils.isRegularBase(ref.getBase())) {
if ( ! omitDepthOutput ) {
getCorrectStream(null, DoCOutputType.Aggregation.locus, DoCOutputType.FileType.summary).printf("%s",ref.getLocus()); // yes: print locus in map, and the rest of the info in reduce (for eventual cumulatives)
//System.out.printf("\t[log]\t%s",ref.getLocus());
}
if ( ! omitDepthOutput ) {
getCorrectStream(null, DoCOutputType.Aggregation.locus, DoCOutputType.FileType.summary).printf("%s",ref.getLocus()); // yes: print locus in map, and the rest of the info in reduce (for eventual cumulatives)
//System.out.printf("\t[log]\t%s",ref.getLocus());
return CoverageUtils.getBaseCountsByPartition(context,minMappingQuality,maxMappingQuality,minBaseQuality,maxBaseQuality,partitionTypes);
} else {
return null;
}
return CoverageUtils.getBaseCountsByPartition(context,minMappingQuality,maxMappingQuality,minBaseQuality,maxBaseQuality,partitionTypes);
}
public CoveragePartitioner reduce(Map<DoCOutputType.Partition,Map<String,int[]>> thisMap, CoveragePartitioner prevReduce) {
if ( ! omitDepthOutput ) {
//checkOrder(prevReduce); // tests prevReduce.getIdentifiersByType().get(t) against the initialized header order
printDepths(getCorrectStream(null, DoCOutputType.Aggregation.locus, DoCOutputType.FileType.summary),thisMap,prevReduce.getIdentifiersByType());
// this is an additional iteration through thisMap, plus dealing with IO, so should be much slower without
// turning on omit
}
if ( thisMap != null ) { // skip sites we didn't want to include in the calculation (ref Ns)
if ( ! omitDepthOutput ) {
//checkOrder(prevReduce); // tests prevReduce.getIdentifiersByType().get(t) against the initialized header order
printDepths(getCorrectStream(null, DoCOutputType.Aggregation.locus, DoCOutputType.FileType.summary),thisMap,prevReduce.getIdentifiersByType());
// this is an additional iteration through thisMap, plus dealing with IO, so should be much slower without
// turning on omit
}
prevReduce.update(thisMap); // note that in "useBoth" cases, this method alters the thisMap object
prevReduce.update(thisMap); // note that in "useBoth" cases, this method alters the thisMap object
}
return prevReduce;
}

View File

@ -253,7 +253,7 @@ public class UnifiedGenotyperEngine {
VariantContext vcInput = UnifiedGenotyperEngine.getVCFromAllelesRod(tracker, ref, rawContext.getLocation(), false, logger, UAC.alleles);
if ( vcInput == null )
return null;
vc = new VariantContextBuilder(vcInput).source("UG_call").noID().referenceBaseForIndel(ref.getBase()).attributes(new HashMap<String, Object>()).filters(new HashSet<String>()).make();
vc = new VariantContextBuilder("UG_call", ref.getLocus().getContig(), ref.getLocus().getStart(), ref.getLocus().getStart(), vcInput.getAlleles()).make();
} else {
// deal with bad/non-standard reference bases
if ( !Allele.acceptableAlleleBases(new byte[]{ref.getBase()}) )

View File

@ -544,12 +544,15 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec {
}
/**
* return true if this is a symbolic allele (e.g. <SOMETAG>) otherwise false
* return true if this is a symbolic allele (e.g. <SOMETAG>) or
* structural variation breakend (with [ or ]), otherwise false
* @param allele the allele to check
* @return true if the allele is a symbolic allele, otherwise false
*/
private static boolean isSymbolicAllele(String allele) {
return (allele != null && allele.startsWith("<") && allele.endsWith(">") && allele.length() > 2);
return (allele != null && allele.length() > 2 &&
((allele.startsWith("<") && allele.endsWith(">")) ||
(allele.contains("[") || allele.contains("]"))));
}
/**

View File

@ -205,6 +205,7 @@ public abstract class AbstractReadBackedPileup<RBP extends AbstractReadBackedPil
protected abstract AbstractReadBackedPileup<RBP, PE> createNewPileup(GenomeLoc loc, PileupElementTracker<PE> pileupElementTracker);
protected abstract PE createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion, boolean isNextToSoftClip);
protected abstract PE createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion, boolean isNextToSoftClip, String nextEventBases, int nextEventLength );
// --------------------------------------------------------
//

View File

@ -48,7 +48,7 @@ public class ExtendedEventPileupElement extends PileupElement {
public ExtendedEventPileupElement(GATKSAMRecord read, int offset, int eventLength, String eventBases, Type type) {
super(read, offset, type == Type.DELETION, false, false, false); // extended events are slated for removal
super(read, offset, type == Type.DELETION, false, false, false,null,-1); // extended events are slated for removal
this.read = read;
this.offset = offset;
this.eventLength = eventLength;

View File

@ -27,6 +27,10 @@ public class PileupElement implements Comparable<PileupElement> {
protected final boolean isBeforeDeletion;
protected final boolean isBeforeInsertion;
protected final boolean isNextToSoftClip;
protected final int eventLength;
protected final String eventBases; // if it is a deletion, we do not have information about the actual deleted bases
// in the read itself, so we fill the string with D's; for insertions we keep actual inserted bases
/**
* Creates a new pileup element.
@ -37,12 +41,15 @@ public class PileupElement implements Comparable<PileupElement> {
* @param isBeforeDeletion whether or not this base is before a deletion
* @param isBeforeInsertion whether or not this base is before an insertion
* @param isNextToSoftClip whether or not this base is next to a soft clipped base
* @param nextEventBases bases of the next event, when this element comes before an insertion or deletion
* @param nextEventLength length of the next event, when it is an insertion or deletion
*/
@Requires({
"read != null",
"offset >= -1",
"offset <= read.getReadLength()"})
public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isBeforeInsertion, final boolean isNextToSoftClip) {
public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isBeforeInsertion, final boolean isNextToSoftClip,
final String nextEventBases, final int nextEventLength) {
if (offset < 0 && isDeletion)
throw new ReviewedStingException("Pileup Element cannot create a deletion with a negative offset");
@ -52,8 +59,19 @@ public class PileupElement implements Comparable<PileupElement> {
this.isBeforeDeletion = isBeforeDeletion;
this.isBeforeInsertion = isBeforeInsertion;
this.isNextToSoftClip = isNextToSoftClip;
if (isBeforeInsertion)
eventBases = nextEventBases;
else
eventBases = null; // ignore argument in any other case
if (isBeforeDeletion || isBeforeInsertion)
eventLength = nextEventLength;
else
eventLength = -1;
}
public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isBeforeInsertion, final boolean isNextToSoftClip) {
this(read,offset, isDeletion, isBeforeDeletion, isBeforeInsertion, isNextToSoftClip, null, -1);
}
public boolean isDeletion() {
return isDeletion;
}
@ -104,6 +122,20 @@ public class PileupElement implements Comparable<PileupElement> {
return getBaseDeletionQual(offset);
}
/**
* Returns the length of the event (number of inserted or deleted bases)
*/
public int getEventLength() {
return eventLength;
}
/**
* Returns the actual sequence of inserted bases, or null if the event is a deletion or if there is no event in the associated read.
*/
public String getEventBases() {
return eventBases;
}
public int getMappingQual() {
return read.getMappingQuality();
}

View File

@ -99,6 +99,11 @@ public class ReadBackedExtendedEventPileupImpl extends AbstractReadBackedPileup<
protected ExtendedEventPileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion, boolean isNextToSoftClip) {
throw new UnsupportedOperationException("Not enough information provided to create a new pileup element");
}
@Override
protected ExtendedEventPileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion,
boolean isNextToSoftClip,String nextEventBases, int nextEventLength) {
throw new UnsupportedOperationException("Not enough information provided to create a new pileup element");
}
/**

View File

@ -71,7 +71,13 @@ public class ReadBackedPileupImpl extends AbstractReadBackedPileup<ReadBackedPil
}
@Override
protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion, boolean isNextToSoftClip) {
return new PileupElement(read, offset, isDeletion, isBeforeDeletion, isBeforeInsertion, isNextToSoftClip);
protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion,
boolean isNextToSoftClip) {
return new PileupElement(read, offset, isDeletion, isBeforeDeletion, isBeforeInsertion, isNextToSoftClip, null,0);
}
protected PileupElement createNewPileupElement(GATKSAMRecord read, int offset, boolean isDeletion, boolean isBeforeDeletion, boolean isBeforeInsertion,
boolean isNextToSoftClip,String nextEventBases, final int nextEventLength) {
return new PileupElement(read, offset, isDeletion, isBeforeDeletion, isBeforeInsertion, isNextToSoftClip, nextEventBases,nextEventLength);
}
}

View File

@ -36,8 +36,10 @@ public class AlignmentStartWithNoTiesComparator implements Comparator<SAMRecord>
result = cmpContig;
else {
if (r1.getAlignmentStart() < r2.getAlignmentStart()) result = -1;
else result = 1;
if (r1.getAlignmentStart() < r2.getAlignmentStart())
result = -1;
else
result = 1;
}
}

View File

@ -212,7 +212,13 @@ public class Allele implements Comparable<Allele> {
* @return true if the bases represent a symbolic allele
*/
public static boolean wouldBeSymbolicAllele(byte[] bases) {
return bases.length > 2 && bases[0] == '<' && bases[bases.length-1] == '>';
if ( bases.length <= 2 )
return false;
else {
final String strBases = new String(bases);
return (bases[0] == '<' && bases[bases.length-1] == '>') ||
(strBases.contains("[") || strBases.contains("]"));
}
}
/**

View File

@ -6,6 +6,7 @@ import net.sf.samtools.SAMRecord;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.testng.Assert;
@ -85,6 +86,55 @@ public class LocusIteratorByStateUnitTest extends BaseTest {
Assert.assertTrue(foundExtendedEventPileup,"Extended event pileup not found");
}
@Test
public void testIndelsInRegularPileup() {
final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'};
// create a test version of the Reads object
ReadProperties readAttributes = createTestReadProperties();
JVMUtils.setFieldValue(JVMUtils.findField(ReadProperties.class,"generateExtendedEvents"),readAttributes,true);
SAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10);
before.setReadBases(bases);
before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
before.setCigarString("10M");
SAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10);
during.setReadBases(indelBases);
during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
during.setCigarString("4M2I6M");
SAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10);
after.setReadBases(bases);
after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
after.setCigarString("10M");
List<SAMRecord> reads = Arrays.asList(before,during,after);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(reads,readAttributes);
boolean foundIndel = false;
while (li.hasNext()) {
AlignmentContext context = li.next();
if(!context.hasBasePileup())
continue;
ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10);
for (PileupElement p : pileup) {
if (p.isBeforeInsertion()) {
foundIndel = true;
Assert.assertEquals(p.getEventLength(), 2, "Wrong event length");
Assert.assertEquals(p.getEventBases(), "CT", "Inserted bases are incorrect");
break;
}
}
}
Assert.assertTrue(foundIndel,"Indel in pileup not found");
}
/**
* Right now, the GATK's extended event pileup DOES NOT include reads which stop immediately before an insertion

View File

@ -94,4 +94,14 @@ public class DepthOfCoverageIntegrationTest extends WalkerTest {
execute("testNoCoverageDueToFiltering",spec);
}
public void testRefNHandling(boolean includeNs, final String md5) {
String command = "-R " + b37KGReference + " -L 20:26,319,565-26,319,575 -I " + validationDataLocation + "NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.20.bam -T DepthOfCoverage -baseCounts --omitIntervalStatistics --omitLocusTable --omitPerSampleStats -o %s";
if ( includeNs ) command += " --includeRefNSites";
WalkerTestSpec spec = new WalkerTestSpec(command, 1, Arrays.asList(md5));
executeTest("Testing DoC " + (includeNs ? "with" : "without") + " reference Ns", spec);
}
@Test public void testRefNWithNs() { testRefNHandling(true, "24cd2da2e4323ce6fd76217ba6dc2834"); }
@Test public void testRefNWithoutNs() { testRefNHandling(false, "4fc0f1a2e968f777d693abcefd4fb7af"); }
}

View File

@ -9,7 +9,7 @@ import java.util.List;
public class VCFIntegrationTest extends WalkerTest {
@Test
@Test(enabled = true)
public void testReadingAndWritingWitHNoChanges() {
String md5ofInputVCF = "a990ba187a69ca44cb9bc2bb44d00447";
@ -25,4 +25,18 @@ public class VCFIntegrationTest extends WalkerTest {
WalkerTestSpec spec2 = new WalkerTestSpec(test2, 1, Arrays.asList(md5ofInputVCF));
executeTest("Test Variants To VCF from new output", spec2);
}
@Test
// See https://getsatisfaction.com/gsa/topics/support_vcf_4_1_structural_variation_breakend_alleles?utm_content=topic_link&utm_medium=email&utm_source=new_topic
public void testReadingAndWritingBreakpointAlleles() {
String testVCF = testDir + "breakpoint-example.vcf";
//String testVCF = validationDataLocation + "multiallelic.vcf";
String baseCommand = "-R " + b37KGReference + " -NO_HEADER -o %s ";
String test1 = baseCommand + "-T SelectVariants -V " + testVCF;
WalkerTestSpec spec1 = new WalkerTestSpec(test1, 1, Arrays.asList("76075307afd26b4db6234795d9fb3c2f"));
executeTest("Test reading and writing breakpoint VCF", spec1);
}
}

View File

@ -0,0 +1,6 @@
##fileformat=VCFv4.1
#CHROM POS ID REF ALT QUAL FILTER INFO
22 50 bnd_W G G]22:6000] 6 PASS SVTYPE=BND;MATEID=bnd_Y
22 51 bnd_V T ]22:55]T 6 PASS SVTYPE=BND;MATEID=bnd_U
22 55 bnd_U C C[22:51[ 6 PASS SVTYPE=BND;MATEID=bnd_V
22 6000 bnd_Y A A]22:50] 6 PASS SVTYPE=BND;MATEID=bnd_W