The padded reference base is now a real field on the VariantContext class instead of a hackish attribute. More importantly, we now always require it to be present when padding is necessary, and we validate that upon construction of the VC. This cleans up the interface significantly because a reference base no longer needs to be passed in when writing a VC/VCF record.
This commit is contained in:
parent 65c5d55b72
commit 7c89fe01b3
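In short, callers move from `writer.add(vc, ref.getBase())` to `writer.add(vc)`, with the padded base supplied once when the VariantContext is built. A minimal sketch of the new shape, pieced together from the hunks below (the surrounding variables `start`, `stop`, `alleles`, `genotypes`, `attributes`, and `writer` are hypothetical placeholders, not code from this commit):

```java
// Sketch only: VariantContext, VCFWriter, and the constructor argument order are taken
// from the diff below; everything else here is placeholder scaffolding.
Byte refBaseForIndel = (byte) 'A';   // the padded base; may be null when no padding is needed

VariantContext vc = new VariantContext(
        "source", "chr1", start, stop, alleles, genotypes,
        VariantContext.NO_NEG_LOG_10PERROR, null /* filters */, attributes,
        refBaseForIndel);            // validated at construction when padding is required

if ( vc.hasReferenceBaseForIndel() ) {               // new accessors on VariantContext
    Byte paddedBase = vc.getReferenceBaseForIndel();
}

writer.add(vc);   // VCFWriter.add() no longer takes a separate ref base
```

The same nullable Byte travels through the modifyX() factory methods and the copy constructor, so downstream writers can rely on it being present whenever validation demanded it.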
@@ -87,8 +87,8 @@ public class VCFWriterStorage implements Storage<VCFWriterStorage>, VCFWriter {
         writer.writeHeader(stub.getVCFHeader());
     }
 
-    public void add(VariantContext vc, byte ref) {
-        writer.add(vc, ref);
+    public void add(VariantContext vc) {
+        writer.add(vc);
     }
 
     /**
@@ -117,7 +117,7 @@ public class VCFWriterStorage implements Storage<VCFWriterStorage>, VCFWriter {
         BasicFeatureSource<VariantContext> source = BasicFeatureSource.getFeatureSource(file.getAbsolutePath(), new VCFCodec(), false);
 
         for ( VariantContext vc : source.iterator() ) {
-            target.writer.add(vc, vc.getReferenceBaseForIndel());
+            target.writer.add(vc);
         }
 
         source.close();
@@ -192,8 +192,8 @@ public class VCFWriterStub implements Stub<VCFWriter>, VCFWriter {
     /**
      * @{inheritDoc}
      */
-    public void add(VariantContext vc, byte ref) {
-        outputTracker.getStorage(this).add(vc,ref);
+    public void add(VariantContext vc) {
+        outputTracker.getStorage(this).add(vc);
     }
 
     /**
@@ -127,14 +127,13 @@ public class VariantContextAdaptors {
             Map<String, Object> attributes = new HashMap<String, Object>();
             attributes.put(VariantContext.ID_KEY, dbsnp.getRsID());
 
-            if ( sawNullAllele ) {
-                int index = dbsnp.getStart() - ref.getWindow().getStart() - 1;
-                if ( index < 0 )
-                    return null; // we weren't given enough reference context to create the VariantContext
-                attributes.put(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY, new Byte(ref.getBases()[index]));
-            }
-            Collection<Genotype> genotypes = null;
-            VariantContext vc = new VariantContext(name, dbsnp.getChr(), dbsnp.getStart() - (sawNullAllele ? 1 : 0),dbsnp.getEnd(), alleles, genotypes, VariantContext.NO_NEG_LOG_10PERROR, null, attributes);
+            int index = dbsnp.getStart() - ref.getWindow().getStart() - 1;
+            if ( index < 0 )
+                return null; // we weren't given enough reference context to create the VariantContext
+            Byte refBaseForIndel = new Byte(ref.getBases()[index]);
+
+            Map<String, Genotype> genotypes = null;
+            VariantContext vc = new VariantContext(name, dbsnp.getChr(), dbsnp.getStart() - (sawNullAllele ? 1 : 0), dbsnp.getEnd(), alleles, genotypes, VariantContext.NO_NEG_LOG_10PERROR, null, attributes, refBaseForIndel);
             return vc;
         } else
             return null; // can't handle anything else
@@ -225,12 +225,12 @@ public class VariantAnnotator extends RodWalker<Integer, Integer> {
 
         if ( ! indelsOnly ) {
             for ( VariantContext annotatedVC : annotatedVCs )
-                vcfWriter.add(annotatedVC, ref.getBase());
+                vcfWriter.add(annotatedVC);
         } else {
             // check to see if the buffered context is different (in location) this context
             if ( indelBufferContext != null && ! VariantContextUtils.getLocation(getToolkit().getGenomeLocParser(),indelBufferContext.iterator().next()).equals(VariantContextUtils.getLocation(getToolkit().getGenomeLocParser(),annotatedVCs.iterator().next())) ) {
                 for ( VariantContext annotatedVC : indelBufferContext )
-                    vcfWriter.add(annotatedVC, ref.getBase());
+                    vcfWriter.add(annotatedVC);
                 indelBufferContext = annotatedVCs;
             } else {
                 indelBufferContext = annotatedVCs;
@@ -127,7 +127,7 @@ public class BeagleOutputToVCFWalker extends RodWalker<Integer, Integer> {
             return 0;
 
         if (vc_input.isFiltered()) {
-            vcfWriter.add(vc_input, ref.getBase());
+            vcfWriter.add(vc_input);
             return 1;
         }
         List<Object> r2rods = tracker.getReferenceMetaData(R2_ROD_NAME);
@@ -333,7 +333,7 @@ public class BeagleOutputToVCFWalker extends RodWalker<Integer, Integer> {
         }
 
 
-        vcfWriter.add(VariantContext.modifyAttributes(filteredVC,attributes), ref.getBase());
+        vcfWriter.add(VariantContext.modifyAttributes(filteredVC,attributes));
 
 
         return 1;
@@ -171,20 +171,20 @@ public class ProduceBeagleInputWalker extends RodWalker<Integer, Integer> {
             logger.debug(String.format("boot: %d, test: %d, total: %d", bootstrapSetSize, testSetSize, bootstrapSetSize+testSetSize+1));
             if ( (bootstrapSetSize+1.0)/(1.0+bootstrapSetSize+testSetSize) <= bootstrap ) {
                 if ( bootstrapVCFOutput != null ) {
-                    bootstrapVCFOutput.add(VariantContext.modifyFilters(validation, BOOTSTRAP_FILTER), ref.getBase() );
+                    bootstrapVCFOutput.add(VariantContext.modifyFilters(validation, BOOTSTRAP_FILTER));
                 }
                 bootstrapSetSize++;
                 return true;
             } else {
                 if ( bootstrapVCFOutput != null ) {
-                    bootstrapVCFOutput.add(validation,ref.getBase());
+                    bootstrapVCFOutput.add(validation);
                 }
                 testSetSize++;
                 return false;
             }
         } else {
             if ( validation != null && bootstrapVCFOutput != null ) {
-                bootstrapVCFOutput.add(validation,ref.getBase());
+                bootstrapVCFOutput.add(validation);
             }
             return false;
         }
@@ -110,7 +110,7 @@ public class VariantsToBeagleUnphasedWalker extends RodWalker<Integer, Integer>
 
         // if we are holding it back and we are writing a bootstrap VCF, write it out
         if ( makeMissing && bootstrapVCFOutput != null ) {
-            bootstrapVCFOutput.add(vc, ref.getBase());
+            bootstrapVCFOutput.add(vc);
         }
 
         // regardless, all sites are written to the unphased genotypes file, marked as missing if appropriate
@@ -272,7 +272,7 @@ public class VariantFiltrationWalker extends RodWalker<Integer, Integer> {
         else
            filteredVC = new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), genotypes, vc.getNegLog10PError(), filters, vc.getAttributes());
 
-        writer.add( filteredVC, context.getReferenceContext().getBase() );
+        writer.add(filteredVC);
     }
 
     public Integer reduce(Integer value, Integer sum) {
@@ -93,7 +93,7 @@ public class UGCalcLikelihoods extends LocusWalker<VariantCallContext, Integer>
 
     public VariantCallContext map(RefMetaDataTracker tracker, ReferenceContext refContext, AlignmentContext rawContext) {
         VariantContext call = UG_engine.calculateLikelihoods(tracker, refContext, rawContext);
-        return call == null ? null : new VariantCallContext(call, refContext.getBase(), true);
+        return call == null ? null : new VariantCallContext(call, true);
     }
 
     public Integer reduceInit() { return 0; }
@@ -107,7 +107,7 @@ public class UGCalcLikelihoods extends LocusWalker<VariantCallContext, Integer>
             return sum;
 
         try {
-            writer.add(value, value.refBase);
+            writer.add(value);
         } catch (IllegalArgumentException e) {
             throw new IllegalArgumentException(e.getMessage() + "; this is often caused by using the --assume_single_sample_reads argument with the wrong sample name");
         }
@@ -115,7 +115,7 @@ public class UGCallVariants extends RodWalker<VariantCallContext, Integer> {
         try {
             Map<String, Object> attrs = new HashMap<String, Object>(value.getAttributes());
             VariantContextUtils.calculateChromosomeCounts(value, attrs, true);
-            writer.add(VariantContext.modifyAttributes(value, attrs), value.refBase);
+            writer.add(VariantContext.modifyAttributes(value, attrs));
         } catch (IllegalArgumentException e) {
             throw new IllegalArgumentException(e.getMessage() + "; this is often caused by using the --assume_single_sample_reads argument with the wrong sample name");
         }
@@ -227,7 +227,7 @@ public class UnifiedGenotyper extends LocusWalker<VariantCallContext, UnifiedGen
         try {
             // we are actually making a call
             sum.nCallsMade++;
-            writer.add(value, value.refBase);
+            writer.add(value);
         } catch (IllegalArgumentException e) {
             throw new IllegalArgumentException(e.getMessage() + "; this is often caused by using the --assume_single_sample_reads argument with the wrong sample name");
         }
@@ -258,7 +258,7 @@ public class UnifiedGenotyperEngine {
             vc = annotationEngine.annotateContext(tracker, ref, stratifiedContexts, vc);
         }
 
-        return new VariantCallContext(vc, ref.getBase(), false);
+        return new VariantCallContext(vc, false);
     }
 
     private VariantContext createVariantContextFromLikelihoods(ReferenceContext refContext, Allele refAllele, Map<String, MultiallelicGenotypeLikelihoods> GLs) {
@@ -300,7 +300,8 @@ public class UnifiedGenotyperEngine {
                 genotypes,
                 VariantContext.NO_NEG_LOG_10PERROR,
                 null,
-                null);
+                null,
+                refContext.getBase());
     }
 
     // private method called by both UnifiedGenotyper and UGCallVariants entry points into the engine
@@ -425,7 +426,7 @@ public class UnifiedGenotyperEngine {
             myAlleles.add(vc.getReference());
         }
         VariantContext vcCall = new VariantContext("UG_call", loc.getContig(), loc.getStart(), endLoc,
-                myAlleles, genotypes, phredScaledConfidence/10.0, passesCallThreshold(phredScaledConfidence) ? null : filter, attributes);
+                myAlleles, genotypes, phredScaledConfidence/10.0, passesCallThreshold(phredScaledConfidence) ? null : filter, attributes, refContext.getBase());
 
         if ( annotationEngine != null ) {
             // first off, we want to use the *unfiltered* and *unBAQed* context for the annotations
@@ -439,9 +440,7 @@ public class UnifiedGenotyperEngine {
             vcCall = annotationEngine.annotateContext(tracker, refContext, stratifiedContexts, vcCall);
         }
 
-        VariantCallContext call = new VariantCallContext(vcCall, confidentlyCalled(phredScaledConfidence, PofF));
-        call.setRefBase(refContext.getBase());
-        return call;
+        return new VariantCallContext(vcCall, confidentlyCalled(phredScaledConfidence, PofF));
     }
 
     private int calculateEndPos(Set<Allele> alleles, Allele refAllele, GenomeLoc loc) {
@@ -36,7 +36,6 @@ import org.broadinstitute.sting.utils.variantcontext.VariantContext;
  * Useful helper class to communicate the results of calculateGenotype to framework
  */
 public class VariantCallContext extends VariantContext {
-    public byte refBase;
 
     // Was the site called confidently, either reference or variant?
     public boolean confidentlyCalled = false;
@@ -55,16 +54,6 @@ public class VariantCallContext extends VariantContext {
         this.shouldEmit = shouldEmit;
     }
 
-    VariantCallContext(VariantContext vc, byte ref, boolean confidentlyCalledP) {
-        super(vc);
-        this.refBase = ref;
-        this.confidentlyCalled = confidentlyCalledP;
-    }
-
-    public void setRefBase(byte ref) {
-        this.refBase = ref;
-    }
-
     /* these methods are only implemented for GENOTYPE_GIVEN_ALLELES MODE */
     //todo -- expand these methods to all modes
 
@@ -1033,8 +1033,8 @@ public class SomaticIndelDetectorWalker extends ReadWalker<Integer,Integer> {
                 filters.add("NoCall");
             }
             VariantContext vc = new VariantContext("IGv2_Indel_call", refName, start, stop, alleles, genotypes,
-                    -1.0 /* log error */, filters, null);
-            vcf.add(vc,refBases[(int)start-1]);
+                    -1.0 /* log error */, filters, null, refBases[(int)start-1]);
+            vcf.add(vc);
         }
 
     /** Fills l with appropriate alleles depending on whether call is insertion or deletion
@@ -1130,8 +1130,8 @@ public class SomaticIndelDetectorWalker extends ReadWalker<Integer,Integer> {
             }
 
             VariantContext vc = new VariantContext("IGv2_Indel_call", refName, start, stop, alleles, genotypes,
-                    -1.0 /* log error */, filters, attrs);
-            vcf.add(vc,refBases[(int)start-1]);
+                    -1.0 /* log error */, filters, attrs, refBases[(int)start-1]);
+            vcf.add(vc);
         }
 
         @Override
@@ -91,7 +91,7 @@ public class MergeAndMatchHaplotypes extends RodWalker<Integer, Integer> {
             }
 
             VariantContext newvc = new VariantContext(SOURCE_NAME, pbt.getChr(), pbt.getStart(), pbt.getStart(), pbt.getAlleles(), genotypes, pbt.getNegLog10PError(), pbt.getFilters(), pbt.getAttributes());
-            vcfWriter.add(newvc, ref.getBase());
+            vcfWriter.add(newvc);
         }
     }
 
@@ -118,7 +118,7 @@ public class MergeSegregatingAlternateAllelesVCFWriter implements VCFWriter {
         innerWriter.close();
     }
 
-    public void add(VariantContext vc, byte refBase) {
+    public void add(VariantContext vc) {
         if (useSingleSample != null) { // only want to output context for one sample
             Genotype sampGt = vc.getGenotype(useSingleSample);
             if (sampGt != null) // TODO: subContextFromGenotypes() does not handle any INFO fields [AB, HaplotypeScore, MQ, etc.]. Note that even SelectVariants.subsetRecord() only handles AC,AN,AF, and DP!
@@ -138,11 +138,11 @@ public class MergeSegregatingAlternateAllelesVCFWriter implements VCFWriter {
 
             if (curVcIsNotFiltered) { // still need to wait before can release vc
                 logger.debug("Waiting for new variant " + VariantContextUtils.getLocation(genomeLocParser, vc));
-                vcfrWaitingToMerge = new VCFRecord(vc, refBase, false);
+                vcfrWaitingToMerge = new VCFRecord(vc, false);
             }
             else if (!emitOnlyMergedRecords) { // filtered records are never merged
                 logger.debug("DIRECTLY output " + VariantContextUtils.getLocation(genomeLocParser, vc));
-                innerWriter.add(vc, refBase);
+                innerWriter.add(vc);
             }
         }
         else { // waiting to merge vcfrWaitingToMerge
@@ -151,7 +151,7 @@ public class MergeSegregatingAlternateAllelesVCFWriter implements VCFWriter {
             if (!curVcIsNotFiltered) {
                 if (!emitOnlyMergedRecords) { // filtered records are never merged
                     logger.debug("Caching unprocessed output " + VariantContextUtils.getLocation(genomeLocParser, vc));
-                    filteredVcfrList.add(new VCFRecord(vc, refBase, false));
+                    filteredVcfrList.add(new VCFRecord(vc, false));
                 }
             }
             else { // waiting to merge vcfrWaitingToMerge, and curVcIsNotFiltered. So, attempt to merge them:
@@ -188,14 +188,14 @@ public class MergeSegregatingAlternateAllelesVCFWriter implements VCFWriter {
                     addedAttribs.putAll(mergedVc.getAttributes());
                     mergedVc = VariantContext.modifyAttributes(mergedVc, addedAttribs);
 
-                    vcfrWaitingToMerge = new VCFRecord(mergedVc, vcfrWaitingToMerge.refBase, true);
+                    vcfrWaitingToMerge = new VCFRecord(mergedVc, true);
                     numMergedRecords++;
                 }
             }
 
             if (!mergedRecords) {
                 stopWaitingToMerge();
-                vcfrWaitingToMerge = new VCFRecord(vc, refBase, false);
+                vcfrWaitingToMerge = new VCFRecord(vc, false);
             }
             logger.debug("Merged? = " + mergedRecords);
         }
@@ -210,11 +210,11 @@ public class MergeSegregatingAlternateAllelesVCFWriter implements VCFWriter {
         }
 
         if (!emitOnlyMergedRecords || vcfrWaitingToMerge.resultedFromMerge)
-            innerWriter.add(vcfrWaitingToMerge.vc, vcfrWaitingToMerge.refBase);
+            innerWriter.add(vcfrWaitingToMerge.vc);
         vcfrWaitingToMerge = null;
 
         for (VCFRecord vcfr : filteredVcfrList)
-            innerWriter.add(vcfr.vc, vcfr.refBase);
+            innerWriter.add(vcfr.vc);
         filteredVcfrList.clear();
     }
 
@@ -257,12 +257,10 @@ public class MergeSegregatingAlternateAllelesVCFWriter implements VCFWriter {
 
     private static class VCFRecord {
         public VariantContext vc;
-        public byte refBase;
         public boolean resultedFromMerge;
 
-        public VCFRecord(VariantContext vc, byte refBase, boolean resultedFromMerge) {
+        public VCFRecord(VariantContext vc, boolean resultedFromMerge) {
             this.vc = vc;
-            this.refBase = refBase;
             this.resultedFromMerge = resultedFromMerge;
         }
     }
@@ -312,7 +312,7 @@ public class PhaseByTransmission extends RodWalker<Integer, Integer> {
 
             VariantContext newvc = VariantContext.modifyGenotypes(vc, genotypeMap);
 
-            vcfWriter.add(newvc, ref.getBase());
+            vcfWriter.add(newvc);
         }
     }
 
@@ -25,20 +25,10 @@ package org.broadinstitute.sting.gatk.walkers.phasing;
 
 import org.apache.log4j.Logger;
 import org.broadinstitute.sting.utils.codecs.vcf.VCFWriter;
-import org.broadinstitute.sting.utils.variantcontext.Allele;
 import org.broadinstitute.sting.utils.variantcontext.VariantContext;
 
 public class WriteVCF {
     public static void writeVCF(VariantContext vc, VCFWriter writer, Logger logger) {
-        byte refBase;
-        if (!vc.isIndel()) {
-            Allele refAllele = vc.getReference();
-            refBase = SNPallelePair.getSingleBase(refAllele);
-        }
-        else {
-            refBase = vc.getReferenceBaseForIndel();
-        }
-
-        writer.add(vc, refBase);
+        writer.add(vc);
     }
 }
@@ -204,9 +204,9 @@ public class ApplyRecalibration extends RodWalker<Integer, Integer> {
                     filters.add(filterString);
                     vc = VariantContext.modifyFilters(vc, filters);
                 }
-                vcfWriter.add( VariantContext.modifyPErrorFiltersAndAttributes(vc, vc.getNegLog10PError(), vc.getFilters(), attrs), ref.getBase() );
+                vcfWriter.add( VariantContext.modifyPErrorFiltersAndAttributes(vc, vc.getNegLog10PError(), vc.getFilters(), attrs) );
             } else { // valid VC but not compatible with this mode, so just emit the variant untouched
-                vcfWriter.add( vc, ref.getBase() );
+                vcfWriter.add( vc );
             }
         }
     }
@@ -158,7 +158,7 @@ public class CombineVariants extends RodWalker<Integer, Integer> {
 
         if ( ASSUME_IDENTICAL_SAMPLES ) {
             for ( final VariantContext vc : vcs ) {
-                vcfWriter.add( vc, ref.getBase() );
+                vcfWriter.add(vc);
             }
 
             return vcs.isEmpty() ? 0 : 1;
@@ -183,7 +183,7 @@ public class CombineVariants extends RodWalker<Integer, Integer> {
             if ( VCsByType.containsKey(type) )
                 mergedVCs.add(VariantContextUtils.simpleMerge(getToolkit().getGenomeLocParser(), VCsByType.get(type),
                         priority, filteredRecordsMergeType, genotypeMergeOption, true, printComplexMerges,
-                        ref.getBase(), SET_KEY, filteredAreUncalled, MERGE_INFO_WITH_MAX_AC));
+                        SET_KEY, filteredAreUncalled, MERGE_INFO_WITH_MAX_AC));
         }
     }
 
@@ -198,7 +198,7 @@ public class CombineVariants extends RodWalker<Integer, Integer> {
             VariantContext annotatedMergedVC = VariantContext.modifyAttributes(mergedVC, attributes);
             if ( minimalVCF )
                 annotatedMergedVC = VariantContextUtils.pruneVariantContext(annotatedMergedVC, Arrays.asList(SET_KEY));
-            vcfWriter.add(annotatedMergedVC, ref.getBase());
+            vcfWriter.add(annotatedMergedVC);
         }
 
         return vcs.isEmpty() ? 0 : 1;
@@ -78,7 +78,7 @@ public class FilterLiftedVariants extends RodWalker<Integer, Integer> {
         if ( failed )
             failedLocs++;
         else
-            writer.add(vc, ref[0]);
+            writer.add(vc);
     }
 
     public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
@@ -93,7 +93,7 @@ public class LeftAlignVariants extends RodWalker<Integer, Integer> {
         if ( vc.isBiallelic() && vc.isIndel() )
             return writeLeftAlignedIndel(vc, ref);
         else {
-            writer.add(vc, ref.getBase());
+            writer.add(vc);
             return 0;
         }
     }
@@ -109,7 +109,7 @@ public class LeftAlignVariants extends RodWalker<Integer, Integer> {
             indelLength = vc.getAlternateAllele(0).length();
 
        if ( indelLength > 200 ) {
-            writer.add(vc, ref.getBase());
+            writer.add(vc);
            return 0;
        }
 
@@ -137,17 +137,12 @@ public class LeftAlignVariants extends RodWalker<Integer, Integer> {
             byte[] newBases = new byte[indelLength];
             System.arraycopy((vc.isDeletion() ? refSeq : originalIndel), indelIndex, newBases, 0, indelLength);
             Allele newAllele = Allele.create(newBases, vc.isDeletion());
-            newVC = updateAllele(newVC, newAllele);
-
-            // we need to update the reference base just in case it changed
-            Map<String, Object> attrs = new HashMap<String, Object>(newVC.getAttributes());
-            attrs.put(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY, refSeq[indelIndex-1]);
-            newVC = VariantContext.modifyAttributes(newVC, attrs);
-
-            writer.add(newVC, refSeq[indelIndex-1]);
+            newVC = updateAllele(newVC, newAllele, refSeq[indelIndex-1]);
+
+            writer.add(newVC);
             return 1;
         } else {
-            writer.add(vc, ref.getBase());
+            writer.add(vc);
             return 0;
         }
     }
@@ -173,7 +168,7 @@ public class LeftAlignVariants extends RodWalker<Integer, Integer> {
         return hap;
     }
 
-    public static VariantContext updateAllele(VariantContext vc, Allele newAllele) {
+    public static VariantContext updateAllele(VariantContext vc, Allele newAllele, Byte refBaseForIndel) {
         // create a mapping from original allele to new allele
         HashMap<Allele, Allele> alleleMap = new HashMap<Allele, Allele>(vc.getAlleles().size());
         if ( newAllele.isReference() ) {
@@ -197,6 +192,6 @@ public class LeftAlignVariants extends RodWalker<Integer, Integer> {
             newGenotypes.put(genotype.getKey(), Genotype.modifyAlleles(genotype.getValue(), newAlleles));
         }
 
-        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), alleleMap.values(), newGenotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, vc.getAttributes());
+        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), alleleMap.values(), newGenotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, vc.getAttributes(), refBaseForIndel);
     }
 }
@@ -125,14 +125,14 @@ public class LiftoverVariants extends RodWalker<Integer, Integer> {
                 vc = VariantContext.modifyAttributes(vc, attrs);
             }
 
-            VariantContext newVC = VariantContext.createVariantContextWithPaddedAlleles(vc, ref.getBase(), false);
+            VariantContext newVC = VariantContext.createVariantContextWithPaddedAlleles(vc, false);
             if ( originalVC.isSNP() && originalVC.isBiallelic() && VariantContextUtils.getSNPSubstitutionType(originalVC) != VariantContextUtils.getSNPSubstitutionType(newVC) ) {
                 logger.warn(String.format("VCF at %s / %d => %s / %d is switching substitution type %s/%s to %s/%s",
                         originalVC.getChr(), originalVC.getStart(), newVC.getChr(), newVC.getStart(),
                         originalVC.getReference(), originalVC.getAlternateAllele(0), newVC.getReference(), newVC.getAlternateAllele(0)));
             }
 
-            writer.add(vc, ref.getBase());
+            writer.add(vc);
             successfulIntervals++;
         } else {
             failedIntervals++;
@@ -101,9 +101,9 @@ public class RandomlySplitVariants extends RodWalker<Integer, Integer> {
         for ( VariantContext vc : vcs ) {
             int random = GenomeAnalysisEngine.getRandomGenerator().nextInt(1000);
             if ( random < iFraction )
-                vcfWriter1.add(vc, ref.getBase());
+                vcfWriter1.add(vc);
             else
-                vcfWriter2.add(vc, ref.getBase());
+                vcfWriter2.add(vc);
         }
 
         return 1;
@@ -25,38 +25,29 @@
 package org.broadinstitute.sting.gatk.walkers.variantutils;
 
-import org.broadinstitute.sting.commandline.Hidden;
-import org.broadinstitute.sting.commandline.Input;
-import org.broadinstitute.sting.utils.MathUtils;
-import org.broadinstitute.sting.utils.codecs.vcf.*;
-import org.broadinstitute.sting.utils.exceptions.UserException;
-import org.broadinstitute.sting.utils.text.XReadLines;
-import org.broadinstitute.sting.utils.variantcontext.*;
-import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
-import org.broadinstitute.sting.utils.MendelianViolation;
 import org.broadinstitute.sting.utils.variantcontext.VariantContext;
 import org.broadinstitute.sting.commandline.Argument;
 import org.broadinstitute.sting.commandline.Hidden;
 import org.broadinstitute.sting.commandline.Output;
 import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
 import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
 import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
 import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
 import org.broadinstitute.sting.gatk.walkers.RMD;
 import org.broadinstitute.sting.gatk.walkers.Requires;
 import org.broadinstitute.sting.gatk.walkers.RodWalker;
 import org.broadinstitute.sting.utils.MathUtils;
 import org.broadinstitute.sting.utils.MendelianViolation;
 import org.broadinstitute.sting.utils.SampleUtils;
 import org.broadinstitute.sting.utils.codecs.vcf.*;
 import org.broadinstitute.sting.utils.variantcontext.Allele;
 import org.broadinstitute.sting.utils.variantcontext.Genotype;
 import org.broadinstitute.sting.utils.variantcontext.VariantContext;
 import org.broadinstitute.sting.utils.variantcontext.VariantContextUtils;
 
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.PrintStream;
 import java.lang.annotation.AnnotationFormatError;
 import java.util.*;
 
 /**
@@ -140,16 +131,13 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
     /* Private class used to store the intermediate variants in the integer random selection process */
     private class RandomVariantStructure {
         private VariantContext vc;
-        private byte refBase;
 
-        RandomVariantStructure(VariantContext vcP, byte refBaseP) {
+        RandomVariantStructure(VariantContext vcP) {
             vc = vcP;
-            refBase = refBaseP;
         }
 
-        public void set (VariantContext vcP, byte refBaseP) {
+        public void set (VariantContext vcP) {
             vc = vcP;
-            refBase = refBaseP;
         }
 
     }
@@ -374,7 +362,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
                 randomlyAddVariant(++variantNumber, sub, ref.getBase());
             }
             else if (!SELECT_RANDOM_FRACTION || (!KEEP_AF_SPECTRUM && GenomeAnalysisEngine.getRandomGenerator().nextDouble() < fractionRandom)) {
-                vcfWriter.add(sub, ref.getBase());
+                vcfWriter.add(sub);
             }
             else {
                 if (SELECT_RANDOM_FRACTION && KEEP_AF_SPECTRUM ) {
@@ -422,7 +410,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
 
                 //System.out.format("%s .. %4.4f\n",afo.toString(), af);
                 if (GenomeAnalysisEngine.getRandomGenerator().nextDouble() < fractionRandom * afBoost * afBoost)
-                    vcfWriter.add(sub, ref.getBase());
+                    vcfWriter.add(sub);
             }
 
 
@@ -529,7 +517,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
         if (SELECT_RANDOM_NUMBER) {
             int positionToPrint = positionToAdd;
             for (int i=0; i<numRandom; i++) {
-                vcfWriter.add(variantArray[positionToPrint].vc, variantArray[positionToPrint].refBase);
+                vcfWriter.add(variantArray[positionToPrint].vc);
                 positionToPrint = nextCircularPosition(positionToPrint);
             }
         }
@@ -592,13 +580,13 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
 
     private void randomlyAddVariant(int rank, VariantContext vc, byte refBase) {
         if (nVariantsAdded < numRandom)
-            variantArray[nVariantsAdded++] = new RandomVariantStructure(vc, refBase);
+            variantArray[nVariantsAdded++] = new RandomVariantStructure(vc);
 
         else {
             double v = GenomeAnalysisEngine.getRandomGenerator().nextDouble();
             double t = (1.0/(rank-numRandom+1));
             if ( v < t) {
-                variantArray[positionToAdd].set(vc, refBase);
+                variantArray[positionToAdd].set(vc);
                 nVariantsAdded++;
                 positionToAdd = nextCircularPosition(positionToAdd);
             }
@@ -46,7 +46,7 @@ import java.util.*;
  */
 @Reference(window=@Window(start=0,stop=40))
 @Requires(value={},referenceMetaData=@RMD(name=VariantValidationAssessor.INPUT_VARIANT_ROD_BINDING_NAME, type=VariantContext.class))
-public class VariantValidationAssessor extends RodWalker<Pair<VariantContext, Byte>,Integer> {
+public class VariantValidationAssessor extends RodWalker<VariantContext,Integer> {
 
     public static final String INPUT_VARIANT_ROD_BINDING_NAME = "variant";
 
@@ -68,7 +68,7 @@ public class VariantValidationAssessor extends RodWalker<Pair<VariantContext, By
     private TreeSet<String> sampleNames = null;
 
     // variant context records
-    private ArrayList<Pair<VariantContext, Byte>> records = new ArrayList<Pair<VariantContext, Byte>>();
+    private ArrayList<VariantContext> records = new ArrayList<VariantContext>();
 
     // statistics
     private int numRecords = 0;
@@ -89,7 +89,7 @@ public class VariantValidationAssessor extends RodWalker<Pair<VariantContext, By
         return 0;
     }
 
-    public Pair<VariantContext, Byte> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
+    public VariantContext map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
         if ( tracker == null )
             return null;
 
@@ -104,7 +104,7 @@ public class VariantValidationAssessor extends RodWalker<Pair<VariantContext, By
         return addVariantInformationToCall(ref, vc);
     }
 
-    public Integer reduce(Pair<VariantContext, Byte> call, Integer numVariants) {
+    public Integer reduce(VariantContext call, Integer numVariants) {
         if ( call != null ) {
             numVariants++;
             records.add(call);
@@ -155,12 +155,12 @@ public class VariantValidationAssessor extends RodWalker<Pair<VariantContext, By
 
         vcfwriter.writeHeader(new VCFHeader(hInfo, SampleUtils.getUniqueSamplesFromRods(getToolkit(), inputNames)));
 
-        for ( Pair<VariantContext, Byte> record : records )
-            vcfwriter.add(record.first, record.second);
+        for ( VariantContext record : records )
+            vcfwriter.add(record);
     }
 
 
-    private Pair<VariantContext, Byte> addVariantInformationToCall(ReferenceContext ref, VariantContext vContext) {
+    private VariantContext addVariantInformationToCall(ReferenceContext ref, VariantContext vContext) {
 
         // check possible filters
         double hwPvalue = hardyWeinbergCalculation(vContext);
@@ -202,9 +202,7 @@ public class VariantValidationAssessor extends RodWalker<Pair<VariantContext, By
         infoMap.put(VCFConstants.ALLELE_COUNT_KEY, String.format("%d", altAlleleCount));
         infoMap.put(VCFConstants.ALLELE_NUMBER_KEY, String.format("%d", vContext.getChromosomeCount()));
 
-        vContext = VariantContext.modifyAttributes(vContext, infoMap);
-
-        return new Pair<VariantContext, Byte>(vContext, ref.getBase());
+        return VariantContext.modifyAttributes(vContext, infoMap);
     }
 
     private double hardyWeinbergCalculation(VariantContext vc) {
@@ -78,8 +78,8 @@ public class VariantsToTable extends RodWalker<Integer, Integer> {
         getters.put("REF", new Getter() {
             public String get(VariantContext vc) {
                 String x = "";
-                if (vc.hasAttribute(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY)) {
-                    Byte refByte = (Byte)(vc.getAttribute(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY));
+                if ( vc.hasReferenceBaseForIndel() ) {
+                    Byte refByte = vc.getReferenceBaseForIndel();
                     x=x+new String(new byte[]{refByte});
                 }
                 return x+vc.getReference().getDisplayString();
@@ -90,8 +90,8 @@ public class VariantsToTable extends RodWalker<Integer, Integer> {
                 StringBuilder x = new StringBuilder();
                 int n = vc.getAlternateAlleles().size();
                 if ( n == 0 ) return ".";
-                if (vc.hasAttribute(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY)) {
-                    Byte refByte = (Byte)(vc.getAttribute(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY));
+                if ( vc.hasReferenceBaseForIndel() ) {
+                    Byte refByte = vc.getReferenceBaseForIndel();
                     x.append(new String(new byte[]{refByte}));
                 }
 
@@ -149,9 +149,10 @@ public class VariantsToVCF extends RodWalker<Integer, Integer> {
             VariantContext vc = VariantContextAdaptors.toVariantContext(INPUT_ROD_NAME, hapmap, ref);
             if ( vc != null ) {
                 if ( refBase != null ) {
-                    Map<String, Object> attrs = new HashMap<String, Object>(vc.getAttributes());
-                    attrs.put(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY, refBase);
-                    vc = VariantContext.modifyAttributes(vc, attrs);
+                    // TODO -- fix me
+                    //Map<String, Object> attrs = new HashMap<String, Object>(vc.getAttributes());
+                    //attrs.put(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY, refBase);
+                    //vc = VariantContext.modifyAttributes(vc, attrs);
                 }
                 hapmapVCs.add(vc);
             }
@@ -233,7 +234,7 @@ public class VariantsToVCF extends RodWalker<Integer, Integer> {
         }
 
         vc = VariantContextUtils.purgeUnallowedGenotypeAttributes(vc, allowedGenotypeFormatStrings);
-        vcfwriter.add(vc, ref);
+        vcfwriter.add(vc);
     }
 
     public Integer reduceInit() {
@@ -567,7 +567,6 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
 
         // set the reference base for indels in the attributes
         Map<String,Object> attributes = new TreeMap<String,Object>(inputVC.getAttributes());
-        attributes.put(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY, new Byte(inputVC.getReference().getBases()[0]));
 
         Map<Allele, Allele> originalToTrimmedAlleleMap = new HashMap<Allele, Allele>();
 
@@ -611,7 +610,7 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
             genotypes.put(sample.getKey(), Genotype.modifyAlleles(sample.getValue(), trimmedAlleles));
 
         }
-        return new VariantContext(inputVC.getSource(), inputVC.getChr(), inputVC.getStart(), inputVC.getEnd(), alleles, genotypes, inputVC.getNegLog10PError(), inputVC.filtersWereApplied() ? inputVC.getFilters() : null, attributes);
+        return new VariantContext(inputVC.getSource(), inputVC.getChr(), inputVC.getStart(), inputVC.getEnd(), alleles, genotypes, inputVC.getNegLog10PError(), inputVC.filtersWereApplied() ? inputVC.getFilters() : null, attributes, new Byte(inputVC.getReference().getBases()[0]));
 
     }
 
@@ -105,9 +105,8 @@ public abstract class SortingVCFWriterBase implements VCFWriter {
      * add a record to the file
      *
      * @param vc      the Variant Context object
-     * @param refBase the ref base
      */
-    public void add(VariantContext vc, byte refBase) {
+    public void add(VariantContext vc) {
         /* Note that the code below does not prevent the successive add()-ing of: (chr1, 10), (chr20, 200), (chr15, 100)
            since there is no implicit ordering of chromosomes:
         */
@@ -122,7 +121,7 @@ public abstract class SortingVCFWriterBase implements VCFWriter {
 
         noteCurrentRecord(vc); // possibly overwritten
 
-        queue.add(new VCFRecord(vc, refBase));
+        queue.add(new VCFRecord(vc));
         emitSafeRecords();
     }
 
@@ -133,7 +132,7 @@ public abstract class SortingVCFWriterBase implements VCFWriter {
             // No need to wait, waiting for nothing, or before what we're waiting for:
             if (emitUnsafe || mostUpstreamWritableLoc == null || firstRec.vc.getStart() <= mostUpstreamWritableLoc) {
                 queue.poll();
-                innerWriter.add(firstRec.vc, firstRec.refBase);
+                innerWriter.add(firstRec.vc);
             }
             else {
                 break;
@@ -143,7 +142,7 @@ public abstract class SortingVCFWriterBase implements VCFWriter {
 
     /**
      * Gets a string representation of this object.
-     * @return
+     * @return a string representation of this object
      */
     @Override
     public String toString() {
@@ -158,11 +157,9 @@ public abstract class SortingVCFWriterBase implements VCFWriter {
 
     private static class VCFRecord {
         public VariantContext vc;
-        public byte refBase;
 
-        public VCFRecord(VariantContext vc, byte refBase) {
+        public VCFRecord(VariantContext vc) {
             this.vc = vc;
-            this.refBase = refBase;
         }
     }
 }
@@ -202,20 +202,18 @@ public class StandardVCFWriter implements VCFWriter {
      * add a record to the file
      *
      * @param vc      the Variant Context object
-     * @param refBase the ref base used for indels
      */
-    public void add(VariantContext vc, byte refBase) {
-        add(vc, refBase, false);
+    public void add(VariantContext vc) {
+        add(vc, false);
     }
 
     /**
      * add a record to the file
      *
      * @param vc      the Variant Context object
-     * @param refBase the ref base used for indels
      * @param refBaseShouldBeAppliedToEndOfAlleles *** THIS SHOULD BE FALSE EXCEPT FOR AN INDEL AT THE EXTREME BEGINNING OF A CONTIG (WHERE THERE IS NO PREVIOUS BASE, SO WE USE THE BASE AFTER THE EVENT INSTEAD)
      */
-    public void add(VariantContext vc, byte refBase, boolean refBaseShouldBeAppliedToEndOfAlleles) {
+    public void add(VariantContext vc, boolean refBaseShouldBeAppliedToEndOfAlleles) {
         if ( mHeader == null )
             throw new IllegalStateException("The VCF Header must be written before records can be added: " + locationString());
 
@@ -223,7 +221,7 @@ public class StandardVCFWriter implements VCFWriter {
             vc = VariantContext.modifyGenotypes(vc, null);
 
         try {
-            vc = VariantContext.createVariantContextWithPaddedAlleles(vc, refBase, refBaseShouldBeAppliedToEndOfAlleles);
+            vc = VariantContext.createVariantContextWithPaddedAlleles(vc, refBaseShouldBeAppliedToEndOfAlleles);
 
             // if we are doing on the fly indexing, add the record ***before*** we write any bytes
             if ( indexer != null ) indexer.addFeature(vc, positionalStream.getPosition());
@@ -285,7 +283,7 @@ public class StandardVCFWriter implements VCFWriter {
         Map<String, String> infoFields = new TreeMap<String, String>();
         for ( Map.Entry<String, Object> field : vc.getAttributes().entrySet() ) {
             String key = field.getKey();
-            if ( key.equals(VariantContext.ID_KEY) || key.equals(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY) || key.equals(VariantContext.UNPARSED_GENOTYPE_MAP_KEY) || key.equals(VariantContext.UNPARSED_GENOTYPE_PARSER_KEY) )
+            if ( key.equals(VariantContext.ID_KEY) || key.equals(VariantContext.UNPARSED_GENOTYPE_MAP_KEY) || key.equals(VariantContext.UNPARSED_GENOTYPE_PARSER_KEY) )
                 continue;
 
             String outputValue = formatVCFField(field.getValue());
@@ -14,5 +14,5 @@ public interface VCFWriter {
      */
     public void close();
 
-    public void add(VariantContext vc, byte refBase);
+    public void add(VariantContext vc);
 }
@@ -27,15 +27,15 @@ public class MutableVariantContext extends VariantContext {
     }
 
     public MutableVariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles) {
-        this(source, contig, start, stop, alleles, NO_GENOTYPES, InferredGeneticContext.NO_NEG_LOG_10PERROR, null, null);
+        super(source, contig, start, stop, alleles, NO_GENOTYPES, InferredGeneticContext.NO_NEG_LOG_10PERROR, null, null);
     }
 
     public MutableVariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles, Collection<Genotype> genotypes) {
-        this(source, contig, start, stop, alleles, genotypes, InferredGeneticContext.NO_NEG_LOG_10PERROR, null, null);
+        super(source, contig, start, stop, alleles, genotypes, InferredGeneticContext.NO_NEG_LOG_10PERROR, null, null);
     }
 
     public MutableVariantContext(VariantContext parent) {
-        this(parent.getSource(), parent.contig, parent.start, parent.stop, parent.getAlleles(), parent.getGenotypes(), parent.getNegLog10PError(), parent.getFilters(), parent.getAttributes());
+        super(parent.getSource(), parent.contig, parent.start, parent.stop, parent.getAlleles(), parent.getGenotypes(), parent.getNegLog10PError(), parent.getFilters(), parent.getAttributes(), parent.getReferenceBaseForIndel());
     }
 
     /**
@@ -5,6 +5,7 @@ import org.broad.tribble.TribbleException;
 import org.broad.tribble.util.ParsingUtils;
 import org.broadinstitute.sting.utils.codecs.vcf.VCFConstants;
 import org.broadinstitute.sting.utils.codecs.vcf.VCFParser;
+import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
 
 import java.util.*;
 
@@ -163,11 +164,12 @@ import java.util.*;
 public class VariantContext implements Feature { // to enable tribble intergration
     protected InferredGeneticContext commonInfo = null;
     public final static double NO_NEG_LOG_10PERROR = InferredGeneticContext.NO_NEG_LOG_10PERROR;
-    public final static String REFERENCE_BASE_FOR_INDEL_KEY = "_REFERENCE_BASE_FOR_INDEL_";
     public final static String UNPARSED_GENOTYPE_MAP_KEY = "_UNPARSED_GENOTYPE_MAP_";
     public final static String UNPARSED_GENOTYPE_PARSER_KEY = "_UNPARSED_GENOTYPE_PARSER_";
     public final static String ID_KEY = "ID";
 
+    private final Byte REFERENCE_BASE_FOR_INDEL;
+
     public final static Set<String> PASSES_FILTERS = Collections.unmodifiableSet(new LinkedHashSet<String>());
 
     /** The location of this VariantContext */
@@ -205,6 +207,24 @@ public class VariantContext implements Feature { // to enable tribble intergrati
     // ---------------------------------------------------------------------------------------------------------
 
 
+    /**
+     * the complete constructor. Makes a complete VariantContext from its arguments
+     *
+     * @param source source
+     * @param contig the contig
+     * @param start the start base (one based)
+     * @param stop the stop reference base (one based)
+     * @param alleles alleles
+     * @param genotypes genotypes map
+     * @param negLog10PError qual
+     * @param filters filters: use null for unfiltered and empty set for passes filters
+     * @param attributes attributes
+     * @param referenceBaseForIndel padded reference base
+     */
+    public VariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles, Map<String, Genotype> genotypes, double negLog10PError, Set<String> filters, Map<String, ?> attributes, Byte referenceBaseForIndel) {
+        this(source, contig, start, stop, alleles, genotypes, negLog10PError, filters, attributes, referenceBaseForIndel, false);
+    }
+
     /**
      * the complete constructor. Makes a complete VariantContext from its arguments
      *
@@ -219,7 +239,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
      * @param attributes attributes
      */
     public VariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles, Map<String, Genotype> genotypes, double negLog10PError, Set<String> filters, Map<String, ?> attributes) {
-        this(source, contig, start, stop, alleles, genotypes, negLog10PError, filters, attributes, false);
+        this(source, contig, start, stop, alleles, genotypes, negLog10PError, filters, attributes, null, false);
     }
 
     /**
@@ -239,7 +259,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
      * @param attributes attributes
      */
     public VariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles, double negLog10PError, Set<String> filters, Map<String, ?> attributes) {
-        this(source, contig, start, stop, alleles, NO_GENOTYPES, negLog10PError, filters, attributes, true);
+        this(source, contig, start, stop, alleles, NO_GENOTYPES, negLog10PError, filters, attributes, null, true);
     }
 
     /**
@@ -256,7 +276,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
      * @param attributes attributes
      */
     public VariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles, Collection<Genotype> genotypes, double negLog10PError, Set<String> filters, Map<String, ?> attributes) {
-        this(source, contig, start, stop, alleles, genotypes != null ? genotypeCollectionToMap(new TreeMap<String, Genotype>(), genotypes) : null, negLog10PError, filters, attributes, false);
+        this(source, contig, start, stop, alleles, genotypes != null ? genotypeCollectionToMap(new TreeMap<String, Genotype>(), genotypes) : null, negLog10PError, filters, attributes, null, false);
     }
 
     /**
@@ -269,7 +289,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
      * @param alleles alleles
      */
     public VariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles) {
-        this(source, contig, start, stop, alleles, NO_GENOTYPES, InferredGeneticContext.NO_NEG_LOG_10PERROR, null, null, false);
+        this(source, contig, start, stop, alleles, NO_GENOTYPES, InferredGeneticContext.NO_NEG_LOG_10PERROR, null, null, null, false);
     }
 
     /**
@@ -292,7 +312,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
      * @param other the VariantContext to copy
     */
     public VariantContext(VariantContext other) {
-        this(other.getSource(), other.getChr(), other.getStart(), other.getEnd() , other.getAlleles(), other.getGenotypes(), other.getNegLog10PError(), other.filtersWereApplied() ? other.getFilters() : null, other.getAttributes(), false);
+        this(other.getSource(), other.getChr(), other.getStart(), other.getEnd() , other.getAlleles(), other.getGenotypes(), other.getNegLog10PError(), other.filtersWereApplied() ? other.getFilters() : null, other.getAttributes(), other.REFERENCE_BASE_FOR_INDEL, false);
     }
 
     /**
@@ -307,8 +327,13 @@ public class VariantContext implements Feature { // to enable tribble intergrati
      * @param negLog10PError qual
      * @param filters filters: use null for unfiltered and empty set for passes filters
      * @param attributes attributes
+     * @param referenceBaseForIndel padded reference base
      * @param genotypesAreUnparsed true if the genotypes have not yet been parsed
      */
-    private VariantContext(String source, String contig, long start, long stop, Collection<Allele> alleles, Map<String, Genotype> genotypes, double negLog10PError, Set<String> filters, Map<String, ?> attributes, boolean genotypesAreUnparsed) {
+    private VariantContext(String source, String contig, long start, long stop,
+                           Collection<Allele> alleles, Map<String, Genotype> genotypes,
+                           double negLog10PError, Set<String> filters, Map<String, ?> attributes,
+                           Byte referenceBaseForIndel, boolean genotypesAreUnparsed) {
         if ( contig == null ) { throw new IllegalArgumentException("Contig cannot be null"); }
         this.contig = contig;
         this.start = start;
@@ -323,6 +348,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
 
         this.commonInfo = new InferredGeneticContext(source, negLog10PError, filters, attributes);
         filtersWereAppliedToContext = filters != null;
+        REFERENCE_BASE_FOR_INDEL = referenceBaseForIndel;
 
         if ( alleles == null ) { throw new IllegalArgumentException("Alleles cannot be null"); }
 
@@ -355,23 +381,23 @@ public class VariantContext implements Feature { // to enable tribble intergrati
     // ---------------------------------------------------------------------------------------------------------
 
     public static VariantContext modifyGenotypes(VariantContext vc, Map<String, Genotype> genotypes) {
-        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), genotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, new HashMap<String, Object>(vc.getAttributes()), false);
+        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), genotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, new HashMap<String, Object>(vc.getAttributes()), vc.getReferenceBaseForIndel(), false);
     }
 
     public static VariantContext modifyLocation(VariantContext vc, String chr, int start, int end) {
-        return new VariantContext(vc.getSource(), chr, start, end, vc.getAlleles(), vc.genotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, new HashMap<String, Object>(vc.getAttributes()), true);
+        return new VariantContext(vc.getSource(), chr, start, end, vc.getAlleles(), vc.genotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, new HashMap<String, Object>(vc.getAttributes()), vc.getReferenceBaseForIndel(), true);
     }
 
     public static VariantContext modifyFilters(VariantContext vc, Set<String> filters) {
-        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd() , vc.getAlleles(), vc.genotypes, vc.getNegLog10PError(), filters, new HashMap<String, Object>(vc.getAttributes()), true);
+        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd() , vc.getAlleles(), vc.genotypes, vc.getNegLog10PError(), filters, new HashMap<String, Object>(vc.getAttributes()), vc.getReferenceBaseForIndel(), true);
     }
 
     public static VariantContext modifyAttributes(VariantContext vc, Map<String, Object> attributes) {
-        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), vc.genotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, attributes, true);
+        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), vc.genotypes, vc.getNegLog10PError(), vc.filtersWereApplied() ? vc.getFilters() : null, attributes, vc.getReferenceBaseForIndel(), true);
     }
 
     public static VariantContext modifyPErrorFiltersAndAttributes(VariantContext vc, double negLog10PError, Set<String> filters, Map<String, Object> attributes) {
-        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), vc.genotypes, negLog10PError, filters, attributes, true);
+        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(), vc.getAlleles(), vc.genotypes, negLog10PError, filters, attributes, vc.getReferenceBaseForIndel(), true);
     }
 
     // ---------------------------------------------------------------------------------------------------------
@@ -603,6 +629,15 @@ public class VariantContext implements Feature { // to enable tribble intergrati
         return (String)commonInfo.getAttribute(ID_KEY);
     }
 
+    public boolean hasReferenceBaseForIndel() {
+        return REFERENCE_BASE_FOR_INDEL != null;
+    }
+
+    // the indel base that gets stripped off for indels
+    public Byte getReferenceBaseForIndel() {
+        return REFERENCE_BASE_FOR_INDEL;
+    }
+
     // ---------------------------------------------------------------------------------------------------------
     //
     // get routines to access context info fields
@@ -1151,6 +1186,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
 
     private boolean validate(boolean throwException) {
         try {
+            validateReferencePadding();
             validateAlleles();
             validateGenotypes();
         } catch ( IllegalArgumentException e ) {
@@ -1163,6 +1199,13 @@ public class VariantContext implements Feature { // to enable tribble intergrati
         return true;
     }
 
+    private void validateReferencePadding() {
+        boolean needsPadding = hasSymbolicAlleles() || (getReference().length() == getEnd() - getStart()); // off by one because padded base was removed
+
+        if ( needsPadding && !hasReferenceBaseForIndel() )
+            throw new ReviewedStingException("Badly formed variant context at location " + getChr() + ":" + getStart() + "; no padded reference base was provided.");
+    }
+
     private void validateAlleles() {
         // check alleles
         boolean alreadySeenRef = false, alreadySeenNull = false;
@@ -1221,16 +1264,6 @@ public class VariantContext implements Feature { // to enable tribble intergrati
     //
     // ---------------------------------------------------------------------------------------------------------
 
-    // the indel base that gets stripped off for indels
-    public boolean hasReferenceBaseForIndel() {
-        return hasAttribute(REFERENCE_BASE_FOR_INDEL_KEY);
-    }
-
-    // the indel base that gets stripped off for indels
-    public byte getReferenceBaseForIndel() {
-        return hasReferenceBaseForIndel() ? (Byte)getAttribute(REFERENCE_BASE_FOR_INDEL_KEY) : (byte)'N';
-    }
-
     private void determineType() {
         if ( type == null ) {
             switch ( getNAlleles() ) {
@ -1357,8 +1390,7 @@ public class VariantContext implements Feature { // to enable tribble intergrati
|
|||
return false;
|
||||
}
|
||||
|
||||
public static VariantContext createVariantContextWithPaddedAlleles(VariantContext inputVC, byte inputRefBase, boolean refBaseShouldBeAppliedToEndOfAlleles) {
|
||||
Allele refAllele = inputVC.getReference();
|
||||
public static VariantContext createVariantContextWithPaddedAlleles(VariantContext inputVC, boolean refBaseShouldBeAppliedToEndOfAlleles) {
|
||||
|
||||
// see if we need to pad common reference base from all alleles
|
||||
boolean padVC;
|
||||
|
|
@@ -1368,31 +1400,20 @@ public class VariantContext implements Feature { // to enable tribble integration
         long locLength = (inputVC.getEnd() - inputVC.getStart()) + 1;
         if (inputVC.hasSymbolicAlleles())
             padVC = true;
-        else if (refAllele.length() == locLength)
+        else if (inputVC.getReference().length() == locLength)
             padVC = false;
-        else if (refAllele.length() == locLength-1)
+        else if (inputVC.getReference().length() == locLength-1)
             padVC = true;
         else throw new IllegalArgumentException("Badly formed variant context at location " + String.valueOf(inputVC.getStart()) +
                 " in contig " + inputVC.getChr() + ". Reference length must be at most one base shorter than location size");
 
         // nothing to do if we don't need to pad bases
         if (padVC) {
-            Byte refByte;
-
-            Map<String,Object> attributes = inputVC.getAttributes();
+            if ( !inputVC.hasReferenceBaseForIndel() )
+                throw new ReviewedStingException("Badly formed variant context at location " + inputVC.getChr() + ":" + inputVC.getStart() + "; no padded reference base is available.");
 
-            // upper-case for consistency; note that we can safely make these casts because the input is constrained to be a byte
-            inputRefBase = (byte)Character.toUpperCase((char)inputRefBase);
-            if (attributes.containsKey(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY))
-                refByte = (Byte)attributes.get(VariantContext.REFERENCE_BASE_FOR_INDEL_KEY);
-            else if (inputRefBase == 'A' || inputRefBase == 'T' || inputRefBase == 'C' || inputRefBase == 'G' || inputRefBase == 'N')
-                refByte = inputRefBase;
-            else
-                throw new IllegalArgumentException("Error when trying to pad Variant Context at location " + String.valueOf(inputVC.getStart())
-                        + " in contig " + inputVC.getChr() +
-                        ". Either input reference base ("+(char)inputRefBase+
-                        ", ascii code="+inputRefBase+") must be a regular base, or input VC must contain reference base key");
+            Byte refByte = inputVC.getReferenceBaseForIndel();
 
             List<Allele> alleles = new ArrayList<Allele>();
             Map<String, Genotype> genotypes = new TreeMap<String, Genotype>();
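The upshot for callers: stop threading a raw reference byte through and let the VC carry it. Migration is mechanical; vc below stands for any VariantContext built under the new validation:

    // Before (removed overload), the caller supplied the base by hand:
    //     VariantContext padded = VariantContext.createVariantContextWithPaddedAlleles(vc, refBase, false);
    // After, the base is read from vc.getReferenceBaseForIndel(), guaranteed present by validation:
    VariantContext padded = VariantContext.createVariantContextWithPaddedAlleles(vc, false);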
@@ -1444,11 +1465,7 @@ public class VariantContext implements Feature { // to enable tribble integration
 
             // Do not change the filter state if filters were not applied to this context
             Set<String> inputVCFilters = inputVC.filtersWereAppliedToContext ? inputVC.getFilters() : null;
-            return new VariantContext(inputVC.getSource(), inputVC.getChr(), inputVC.getStart(), inputVC.getEnd(), alleles, genotypes, inputVC.getNegLog10PError(),
-                    inputVCFilters, attributes);
-
-
-
+            return new VariantContext(inputVC.getSource(), inputVC.getChr(), inputVC.getStart(), inputVC.getEnd(), alleles, genotypes, inputVC.getNegLog10PError(), inputVCFilters, inputVC.getAttributes());
         }
         else
             return inputVC;
@@ -295,10 +295,7 @@ public class VariantContextUtils {
     @Requires("vc != null")
     @Ensures("result != null")
    public static VariantContext sitesOnlyVariantContext(VariantContext vc) {
-        return new VariantContext(vc.getSource(), vc.getChr(), vc.getStart(), vc.getEnd(),
-                vc.getAlleles(), vc.getNegLog10PError(),
-                vc.filtersWereApplied() ? vc.getFilters() : null,
-                vc.getAttributes());
+        return VariantContext.modifyGenotypes(vc, null);
    }
 
    /**
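Now that the padded base is no longer an attribute, the old hand-rolled constructor call here would silently drop it; delegating to modifyGenotypes(vc, null) keeps the copy logic in one place. modifyGenotypes itself is outside this diff, so the expected behavior is stated below as a sanity sketch, not a test from this patch:

    VariantContext sites = VariantContextUtils.sitesOnlyVariantContext(vc);
    assert !sites.hasGenotypes();                 // genotypes stripped
    assert sites.getStart() == vc.getStart();     // site-level fields preserved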
@@ -449,7 +446,7 @@ public class VariantContextUtils {
                                              FilteredRecordMergeType filteredRecordMergeType, GenotypeMergeType genotypeMergeOptions,
                                              boolean annotateOrigin, boolean printMessages, byte inputRefBase ) {
 
-        return simpleMerge(genomeLocParser, unsortedVCs, priorityListOfVCs, filteredRecordMergeType, genotypeMergeOptions, annotateOrigin, printMessages, inputRefBase, "set", false, false);
+        return simpleMerge(genomeLocParser, unsortedVCs, priorityListOfVCs, filteredRecordMergeType, genotypeMergeOptions, annotateOrigin, printMessages, "set", false, false);
    }
 
    /**
@@ -464,7 +461,6 @@ public class VariantContextUtils {
      * @param genotypeMergeOptions merge option for genotypes
      * @param annotateOrigin should we annotate the set it came from?
      * @param printMessages should we print messages?
-     * @param inputRefBase the ref base
      * @param setKey the key name of the set
      * @param filteredAreUncalled are filtered records uncalled?
      * @param mergeInfoWithMaxAC should we merge in info from the VC with maximum allele count?
@@ -472,7 +468,7 @@ public class VariantContextUtils {
      */
    public static VariantContext simpleMerge(GenomeLocParser genomeLocParser, Collection<VariantContext> unsortedVCs, List<String> priorityListOfVCs,
                                              FilteredRecordMergeType filteredRecordMergeType, GenotypeMergeType genotypeMergeOptions,
-                                             boolean annotateOrigin, boolean printMessages, byte inputRefBase, String setKey,
+                                             boolean annotateOrigin, boolean printMessages, String setKey,
                                              boolean filteredAreUncalled, boolean mergeInfoWithMaxAC ) {
         if ( unsortedVCs == null || unsortedVCs.size() == 0 )
             return null;
@@ -490,7 +486,7 @@ public class VariantContextUtils {
         for (VariantContext vc : prepaddedVCs) {
             // also a reasonable place to remove filtered calls, if needed
             if ( ! filteredAreUncalled || vc.isNotFiltered() )
-                VCs.add(VariantContext.createVariantContextWithPaddedAlleles(vc,inputRefBase,false));
+                VCs.add(VariantContext.createVariantContextWithPaddedAlleles(vc, false));
         }
         if ( VCs.size() == 0 ) // everything is filtered out and we're filteredAreUncalled
             return null;
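Every external caller of the long-form simpleMerge needs the matching trim. A before/after sketch; the argument names are illustrative, only the arity change comes from this patch:

    // Before: a reference byte rode along on every merge call.
    //     VariantContext merged = VariantContextUtils.simpleMerge(parser, vcs, priority, mergeType,
    //             genotypeMergeType, true, false, refBase, "set", false, false);
    // After: drop the byte; each VC already carries its own padded base.
    VariantContext merged = VariantContextUtils.simpleMerge(parser, vcs, priority, mergeType,
            genotypeMergeType, true, false, "set", false, false);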
@@ -120,6 +120,6 @@ public class CombineVariantsIntegrationTest extends WalkerTest {
 
     @Test public void complexTestFull() { combineComplexSites("", "b5a53ee92bdaacd2bb3327e9004ae058"); }
     @Test public void complexTestMinimal() { combineComplexSites(" -minimalVCF", "df96cb3beb2dbb5e02f80abec7d3571e"); }
-    @Test public void complexTestSitesOnly() { combineComplexSites(" -sites_only", "f72a178137e25dbe0b931934cdc0079d"); }
+    @Test public void complexTestSitesOnly() { combineComplexSites(" -sites_only", "f704caeaaaed6711943014b847fe381a"); }
     @Test public void complexTestSitesOnlyMinimal() { combineComplexSites(" -sites_only -minimalVCF", "f704caeaaaed6711943014b847fe381a"); }
 }
@@ -70,7 +70,7 @@ public class IndexFactoryUnitTest {
         CloseableTribbleIterator<VariantContext> it = source.iterator();
         while (it.hasNext() && (counter++ < maxRecords || maxRecords == -1) ) {
             VariantContext vc = it.next();
-            writer.add(vc, vc.getReferenceBaseForIndel());
+            writer.add(vc);
         }
         writer.close();
@@ -57,8 +57,8 @@ public class VCFWriterUnitTest extends BaseTest {
         VCFHeader header = createFakeHeader(metaData,additionalColumns);
         VCFWriter writer = new StandardVCFWriter(fakeVCFFile);
         writer.writeHeader(header);
-        writer.add(createVC(header),"A".getBytes()[0]);
-        writer.add(createVC(header),"A".getBytes()[0]);
+        writer.add(createVC(header));
+        writer.add(createVC(header));
         writer.close();
         VCFCodec reader = new VCFCodec();
         AsciiLineReader lineReader;
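This hunk shows the new writer contract end to end: VCFWriter.add no longer wants a reference base at write time. A minimal emit loop under the new signature (the file name and the contexts collection are placeholders, and java.io.File is assumed imported):

    // Write a stream of contexts with the one-argument add().
    VCFWriter writer = new StandardVCFWriter(new File("out.vcf"));   // placeholder path
    writer.writeHeader(header);
    for ( VariantContext vc : contexts )
        writer.add(vc);       // any padded base needed downstream already lives on vc
    writer.close();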
@@ -19,14 +19,14 @@ public class VariantContextIntegrationTest extends WalkerTest {
 
     static HashMap<String, String> expectations = new HashMap<String, String>();
     static {
-        expectations.put("-L 1:1-10000 --printPerLocus", "e9d96677a57bc3a10fb6d9ba942c19f0");
-        expectations.put("-L 1:1-10000 --printPerLocus --takeFirstOnly", "8a1174d2b18b98e624abbe93e6af8fdd");
-        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsStartinAtCurrentPosition", "3933f1fae5453c54c3f791a23de07599");
-        expectations.put("-L 1:1-10000 --printPerLocus --takeFirstOnly --onlyContextsStartinAtCurrentPosition", "c9cf2f01bf045a58dcc7649fd6ea2396");
+        expectations.put("-L 1:1-10000 --printPerLocus", "c44a48dd9062a435a3579145ce8d1684");
+        expectations.put("-L 1:1-10000 --printPerLocus --takeFirstOnly", "fa5762fa7dcb2652ed34bcdce9ecf455");
+        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsStartinAtCurrentPosition", "dfdc554c52707541d335c3fb849feaba");
+        expectations.put("-L 1:1-10000 --printPerLocus --takeFirstOnly --onlyContextsStartinAtCurrentPosition", "db8ba72b557ebd698215281e5656b59c");
         expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType SNP", "2097e32988d603d3b353b50218c86d3b");
-        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType INDEL", "a103d856e8bc558c949c6e3f184e8913");
-        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType INDEL --onlyContextsStartinAtCurrentPosition", "5f2265ac6c6d80d64dc6e69a05c1250b");
-        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType MIXED", "06a3ae4c0afa23b429a9491ab7707f3c");
+        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType INDEL", "7f5eadb2098aafdef8bb45aac3722d03");
+        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType INDEL --onlyContextsStartinAtCurrentPosition", "a31b76fb8ed727616d8fb823c62bf677");
+        expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType MIXED", "f9d30920c8834ec7c7892507a5052fb7");
         expectations.put("-L 1:1-10000 --printPerLocus --onlyContextsOfType NO_VARIATION", "39335acdb34c8a2af433dc50d619bcbc");
     }
@@ -58,7 +58,7 @@ public class VariantContextIntegrationTest extends WalkerTest {
         // this really just tests that we are seeing the same number of objects over all of chr1
         WalkerTestSpec spec = new WalkerTestSpec( root + " -L 1" + " -o %s",
                 1, // just one output file
-                Arrays.asList("045a5b02c86aeb9301dc0b724da0c8f7"));
+                Arrays.asList("137258e1dc490bfa83a2294c52e97ba9"));
         executeTest("testLargeScaleConversion", spec);
     }
 }