GenotypeAndValidate version 2, ready to be used.

- now it differentiates between confident REF calls and not confident calls.
- you can now use a BAM file as the truth set. 
- output is much clearer now

dataProcessingPipeline version 2, ready to be used.
- All the processing is now done at the sample level
- Reads the input bam file headers to combine all lanes of the same sample.
- Cleaning is now scattered/gathered. Intelligently breaks the work down into as many intervals as possible, given the dataset.
- Outputs one processed bam file per sample (and a .list file with all processed files listed)
- Much faster; low-pass datasets (e.g. the Papuan samples) can run in the hour queue.




git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@5493 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
carneiro 2011-03-22 20:18:02 +00:00
parent 687b2e51b4
commit 28149e5c5e
3 changed files with 186 additions and 79 deletions

View File

@ -64,4 +64,26 @@ public class VariantCallContext extends VariantContext {
/**
 * Sets the reference base recorded for this call context.
 *
 * @param ref the reference base at this site, as a raw byte
 */
public void setRefBase(byte ref) {
this.refBase = ref;
}
/* these methods are only implemented for GENOTYPE_GIVEN_ALLELES MODE */
//todo -- expand these methods to all modes
/**
 * Reports whether this site was confidently called as reference: the call must be
 * confident AND its phred-scaled quality must fall below the calling threshold.
 *
 * @param callConfidenceThreshold the Unified Argument Collection STANDARD_CONFIDENCE_FOR_CALLING
 * @return true if the call was confidently ref
 */
public boolean isCalledRef(double callConfidenceThreshold) {
    if (!confidentlyCalled)
        return false;
    return getPhredScaledQual() < callConfidenceThreshold;
}
/**
 * Reports whether this site was confidently called as an alternate allele: the call
 * must be confident AND its phred-scaled quality must exceed the calling threshold.
 *
 * @param callConfidenceThreshold the Unified Argument Collection STANDARD_CONFIDENCE_FOR_CALLING
 * @return true if the call was confidently alt
 */
public boolean isCalledAlt(double callConfidenceThreshold) {
    if (!confidentlyCalled)
        return false;
    return getPhredScaledQual() > callConfidenceThreshold;
}
}

View File

@ -25,6 +25,8 @@
package org.broadinstitute.sting.playground.gatk.walkers;
import org.broad.tribble.util.variantcontext.Allele;
import org.broad.tribble.util.variantcontext.Genotype;
import org.broad.tribble.util.variantcontext.MutableVariantContext;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.VCFHeader;
@ -66,11 +68,14 @@ import static org.broadinstitute.sting.utils.IndelUtils.isInsideExtendedIndel;
@Reference(window=@Window(start=-200,stop=200))
public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalker.CountedData, GenotypeAndValidateWalker.CountedData> {
public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalker.CountedData, GenotypeAndValidateWalker.CountedData> implements TreeReducible<GenotypeAndValidateWalker.CountedData> {
@Output(doc="File to which validated variants should be written", required=true)
@Output(doc="File to which validated variants should be written", required=false)
protected VCFWriter vcfWriter = null;
@Argument(fullName ="set_bam_truth", shortName ="bt", doc="Use the calls on the reads (bam file) as the truth dataset and validate the calls on the VCF", required=false)
private boolean bamIsTruth = false;
@Argument(fullName="minimum_base_quality_score", shortName="mbq", doc="Minimum base quality score for calling a genotype", required=false)
private int mbq = -1;
@ -86,32 +91,35 @@ public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalk
@Argument(fullName="condition_on_depth", shortName="depth", doc="Condition validation on a minimum depth of coverage by the reads", required=false)
private int minDepth = -1;
@Argument(fullName ="sample", shortName ="sn", doc="Name of the sample to validate (in case your VCF/BAM has more than one sample)", required=false)
private String sample = "";
private String compName = "alleles";
private UnifiedGenotyperEngine snpEngine;
private UnifiedGenotyperEngine indelEngine;
/**
 * Plain holder for the counters accumulated per-site during traversal and merged
 * via {@link #add}.
 *
 * NOTE(review): both the legacy counters (numTP/numTN/numFP/numFN/numUncovered/
 * numConfidentCalls/numNotConfidentCalls) and their renamed replacements
 * (nAltCalledAlt/nAltCalledRef/nRefCalledAlt/nRefCalledRef/nUncovered/
 * nNotConfidentCalls) appear here simultaneously -- this looks like two versions
 * of the class overlapping in the diff; confirm which set is live and remove the
 * dead one.
 */
public static class CountedData {
// legacy truth-table counters (pre-rename)
private long numTP = 0L;
private long numTN = 0L;
private long numFP = 0L;
private long numFN = 0L;
private long numUncovered = 0L;
private long numConfidentCalls = 0L;
private long numNotConfidentCalls = 0L;
// renamed truth-table counters: rows are what we called, columns are the truth
private long nAltCalledAlt = 0L;
private long nAltCalledRef = 0L;
private long nRefCalledAlt = 0L;
private long nRefCalledRef = 0L;
private long nNotConfidentCalls = 0L;
private long nUncovered = 0L;
/**
* Adds the values of other to this, returning this
* @param other the other object
*/
public void add(CountedData other) {
// legacy counters
numTP += other.numTP;
numTN += other.numTN;
numFP += other.numFP;
numFN += other.numFN;
numUncovered += other.numUncovered;
numNotConfidentCalls += other.numNotConfidentCalls;
numConfidentCalls += other.numConfidentCalls;
// renamed counters
nAltCalledAlt += other.nAltCalledAlt;
nAltCalledRef += other.nAltCalledRef;
nRefCalledAlt += other.nRefCalledAlt;
nRefCalledRef += other.nRefCalledRef;
nUncovered += other.nUncovered;
nNotConfidentCalls += other.nNotConfidentCalls;
}
}
@ -133,28 +141,31 @@ public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalk
// Initialize VCF header
Map<String, VCFHeader> header = VCFUtils.getVCFHeadersFromRodPrefix(getToolkit(), compName);
Set<String> samples = SampleUtils.getSampleList(header, VariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);
Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(header.values(), logger);
headerLines.add(new VCFHeaderLine("source", "GenotypeAndValidate"));
vcfWriter.writeHeader(new VCFHeader(headerLines, samples));
if (vcfWriter != null) {
Map<String, VCFHeader> header = VCFUtils.getVCFHeadersFromRodPrefix(getToolkit(), compName);
Set<String> samples = SampleUtils.getSampleList(header, VariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE);
Set<VCFHeaderLine> headerLines = VCFUtils.smartMergeHeaders(header.values(), logger);
headerLines.add(new VCFHeaderLine("source", "GenotypeAndValidate"));
vcfWriter.writeHeader(new VCFHeader(headerLines, samples));
}
// Filling in SNP calling arguments for UG
UnifiedArgumentCollection uac = new UnifiedArgumentCollection();
uac.OutputMode = UnifiedGenotyperEngine.OUTPUT_MODE.EMIT_ALL_SITES;
uac.GenotypingMode = GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES;
if (!bamIsTruth) uac.GenotypingMode = GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES;
if (mbq >= 0) uac.MIN_BASE_QUALTY_SCORE = mbq;
if (deletions >= 0) uac.MAX_DELETION_FRACTION = deletions;
if (callConf >= 0) uac.STANDARD_CONFIDENCE_FOR_CALLING = callConf;
if (emitConf >= 0) uac.STANDARD_CONFIDENCE_FOR_EMITTING = emitConf;
if (callConf >= 0) uac.STANDARD_CONFIDENCE_FOR_CALLING = callConf;
snpEngine = new UnifiedGenotyperEngine(getToolkit(), uac);
// Adding the INDEL calling arguments for UG
uac.GLmodel = GenotypeLikelihoodsCalculationModel.Model.DINDEL;
indelEngine = new UnifiedGenotyperEngine(getToolkit(), uac);
// make sure we have callConf set to the threshold set by the UAC so we can use it later.
callConf = uac.STANDARD_CONFIDENCE_FOR_CALLING;
}
//---------------------------------------------------------------------------------------------------------------
@ -181,47 +192,74 @@ public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalk
// Do not operate on variants that are not covered to the optional minimum depth
if (!context.hasReads() || (minDepth > 0 && context.getBasePileup().getBases().length < minDepth)) {
counter.numUncovered = 1L;
counter.nUncovered = 1L;
return counter;
}
if (!vcComp.hasAttribute("GV"))
throw new UserException.BadInput("Variant has no GV annotation in the INFO field. " + vcComp.getChr() + ":" + vcComp.getStart());
VariantCallContext call;
if ( vcComp.isSNP() )
call = snpEngine.calculateLikelihoodsAndGenotypes(tracker, ref, context);
else if ( vcComp.isIndel() ) {
call = indelEngine.calculateLikelihoodsAndGenotypes(tracker, ref, context);
// if (call.vc == null) // variant context will be null on an extended indel event and I just want to call it one event.
// return counter;
}
else {
logger.info("Not SNP or INDEL " + vcComp.getChr() + ":" + vcComp.getStart() + " " + vcComp.getAlleles());
return counter;
}
if (!call.confidentlyCalled) {
counter.numNotConfidentCalls = 1L;
if (vcComp.getAttribute("GV").equals("T"))
counter.numFN = 1L;
else
counter.numTN = 1L;
if (bamIsTruth) {
if (call.confidentlyCalled) {
// If truth is a confident REF call
if (call.isVariant()) {
if (vcComp.isVariant())
counter.nAltCalledAlt = 1L; // todo -- may wanna check if the alts called are the same?
else
counter.nAltCalledRef = 1L;
}
// If truth is a confident ALT call
else {
if (vcComp.isVariant())
counter.nRefCalledAlt = 1L;
else
counter.nRefCalledRef = 1L;
}
}
else {
counter.nNotConfidentCalls = 1L;
}
}
else {
counter.numConfidentCalls = 1L;
if (vcComp.getAttribute("GV").equals("T"))
counter.numTP = 1L;
if (!vcComp.hasAttribute("GV"))
throw new UserException.BadInput("Variant has no GV annotation in the INFO field. " + vcComp.getChr() + ":" + vcComp.getStart());
if (call.isCalledAlt(callConf)) {
if (vcComp.getAttribute("GV").equals("T"))
counter.nAltCalledAlt = 1L;
else
counter.nRefCalledAlt = 1L;
}
else if (call.isCalledRef(callConf)) {
if (vcComp.getAttribute("GV").equals("T"))
counter.nAltCalledRef = 1L;
else
counter.nRefCalledRef = 1L;
}
else {
counter.nNotConfidentCalls = 1L;
}
}
if (vcfWriter != null) {
if (!vcComp.hasAttribute("callStatus")) {
MutableVariantContext mvc = new MutableVariantContext(vcComp);
mvc.putAttribute("callStatus", call.isCalledAlt(callConf) ? "ALT" : "REF" );
vcfWriter.add(mvc, ref.getBase());
}
else
counter.numFP = 1L;
vcfWriter.add(vcComp, ref.getBase());
}
if (!vcComp.hasAttribute("callStatus")) {
MutableVariantContext mvc = new MutableVariantContext(vcComp);
mvc.putAttribute("callStatus", call.confidentlyCalled ? "confident" : "notConfident" );
vcfWriter.add(mvc, ref.getBase());
}
else
vcfWriter.add(vcComp, ref.getBase());
return counter;
}
@ -235,17 +273,22 @@ public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalk
return new CountedData();
}
/**
 * Merges two partial counter sets produced by parallel traversal branches.
 * Accumulates {@code sum1} into {@code sum2} and returns the latter.
 *
 * @param sum1 one partial sum (absorbed, left untouched)
 * @param sum2 the other partial sum (mutated in place)
 * @return sum2 after absorbing sum1
 */
public CountedData treeReduce( final CountedData sum1, final CountedData sum2) {
    final CountedData accumulator = sum2;
    accumulator.add(sum1);
    return accumulator;
}
/**
 * Folds a single map() result into the running reduction total.
 *
 * @param mapValue  the counters produced for one site
 * @param reduceSum the running total (mutated in place)
 * @return reduceSum after absorbing mapValue
 */
public CountedData reduce( final CountedData mapValue, final CountedData reduceSum ) {
    final CountedData total = reduceSum;
    total.add(mapValue);
    return total;
}
public void onTraversalDone( CountedData reduceSum ) {
double ppv = 100 * ((double) reduceSum.numTP /( reduceSum.numTP + reduceSum.numFP));
double npv = 100 * ((double) reduceSum.numTN /( reduceSum.numTN + reduceSum.numFN));
double ppv = 100 * ((double) reduceSum.nAltCalledAlt /( reduceSum.nAltCalledAlt + reduceSum.nRefCalledAlt));
double npv = 100 * ((double) reduceSum.nRefCalledRef /( reduceSum.nRefCalledRef + reduceSum.nAltCalledRef));
logger.info(String.format("Resulting Truth Table Output\n\n" +
"---------------------------------------------------\n" +
"\t\t|\tT\t|\tF\t\n" +
"\t\t|\tALT\t|\tREF\t\n" +
"---------------------------------------------------\n" +
"called alt\t|\t%d\t|\t%d\n" +
"called ref\t|\t%d\t|\t%d\n" +
@ -253,20 +296,8 @@ public class GenotypeAndValidateWalker extends RodWalker<GenotypeAndValidateWalk
"positive predictive value: %f%%\n" +
"negative predictive value: %f%%\n" +
"---------------------------------------------------\n" +
"uncovered: %d\n" +
"---------------------------------------------------\n", reduceSum.numTP, reduceSum.numFP, reduceSum.numFN, reduceSum.numTN, ppv, npv, reduceSum.numUncovered));
/*
logger.info("called / true = " + reduceSum.numTP);
logger.info("not called / false = " + reduceSum.numTN);
logger.info("called /false = " + reduceSum.numFP);
logger.info("not called / true = " + reduceSum.numFN);
logger.info("PPV = " + 100 * ((double) reduceSum.numTP /( reduceSum.numTP + reduceSum.numFP)) + "%");
logger.info("NPV = " + 100 * ((double) reduceSum.numTN /( reduceSum.numTN + reduceSum.numFN)) + "%");
logger.info("confidently called = " + reduceSum.numConfidentCalls);
logger.info("not confidently called = " + reduceSum.numNotConfidentCalls );
logger.info("Uncovered = " + reduceSum.numUncovered);
*/
"not confident: %d\n" +
"not covered: %d\n" +
"---------------------------------------------------\n", reduceSum.nAltCalledAlt, reduceSum.nRefCalledAlt, reduceSum.nAltCalledRef, reduceSum.nRefCalledRef, ppv, npv, reduceSum.nNotConfidentCalls, reduceSum.nUncovered));
}
}

View File

@ -7,6 +7,7 @@ import org.broadinstitute.sting.queue.function.ListWriterFunction
import net.sf.samtools.{SAMFileReader,SAMFileHeader,SAMReadGroupRecord}
import collection.JavaConversions._
import org.broadinstitute.sting.commandline.ArgumentSource
class dataProcessingV2 extends QScript {
@ -27,6 +28,9 @@ class dataProcessingV2 extends QScript {
@Input(doc="path to R resources folder inside the Sting repository", shortName="r", required=true)
var R: String = _
@Input(doc="The path to the binary of bwa (usually BAM files have already been mapped - but if you want to remap this is the option)", shortName="bwa", required=false)
var bwaPath: File = _
@Input(doc="input BAM file - or list of BAM files", shortName="i", required=true)
var input: File = _
@ -57,24 +61,42 @@ class dataProcessingV2 extends QScript {
@Input(doc="output bams at intervals only", shortName="intervals", required=false)
var intervals: File = _
// Gracefully hide Queue's output
val queueLogDir: String = ".qlog/"
// Use the number of contigs for scatter gathering jobs
var nContigs: Int = -1
def script = {
// Helpful variables
// Records the contig count from the first BAM seen, then verifies every
// subsequent BAM reports the same count (the count drives scatter/gather sizing).
// Returns false when a file disagrees with the recorded count.
def updateNumberOfContigs(n: Int): Boolean = {
  if (nContigs >= 0)
    return nContigs == n
  nContigs = n
  true
}
def createSampleFiles(): Map[String, File] = {
val outName: String = qscript.outputDir + qscript.projectName
//todo -- process bam headers to compile bamLists of samples.
var sampleTable = scala.collection.mutable.Map.empty[String, List[File]]
// Creating a table with SAMPLE information from each input BAM file
val sampleTable = scala.collection.mutable.Map.empty[String, List[File]]
for (bam <- scala.io.Source.fromFile(input).getLines) {
val bamFile = new File(bam)
val samReader = new SAMFileReader(bamFile)
val header = samReader.getFileHeader()
val readGroup = header.getReadGroups()
for (rg <- readGroup) {
// keep a record of the number of contigs in this bam file (they should all be the same
assert(updateNumberOfContigs(header.getSequenceDictionary.getSequences.size()), "Input BAMS should all have the same number of contigs. " + bam + " has " + header.getSequenceDictionary.getSequences.size())
val readGroups = header.getReadGroups()
// only allow one sample per file. Bam files with multiple samples would require pre-processing of the file
// with PrintReads to separate the samples. Tell user to do it himself!
assert(hasMultipleSamples(readGroups), "The pipeline requires that only one sample is present in a BAM file. Please separate the samples in " + bam)
// Fill out the sample table with the readgroups in this file
for (rg <- readGroups) {
val sample = rg.getSample()
if (!sampleTable.contains(sample))
sampleTable(sample) = List(bamFile)
@ -83,22 +105,36 @@ class dataProcessingV2 extends QScript {
}
}
// Creating one file for each sample in the dataset
val sampleBamFiles = scala.collection.mutable.Map.empty[String, File]
for ((sample, flist) <- sampleTable) {
val sampleFileName = new File(outName + "." + sample + ".bam")
sampleBamFiles(sample) = sampleFileName
add(joinBams(flist, sampleFileName))
}
return sampleBamFiles.toMap
}
println("\nFound the following samples (files created as necessary): ")
def script = {
//todo -- (option - BWA) run BWA on each bam file (per lane bam file) before performing per sample processing
var cohortList: List[File] = List()
val sampleBamFiles = createSampleFiles()
// Simple progress report
println("\nFound the following samples: ")
for ((sample, file) <- sampleBamFiles)
println("\t" + sample + " -> " + file)
val globalIntervals = new File(outName + ".intervals")
// If this is a 'knowns only' indel realignment run, do it only once for all samples.
val globalIntervals = new File(outputDir + projectName + ".intervals")
if (knownsOnly)
add(target(null, globalIntervals))
// Put each sample through the pipeline
for ((sample, bam) <- sampleBamFiles) {
// BAM files generated by the pipeline
@ -124,7 +160,13 @@ class dataProcessingV2 extends QScript {
cov(recalBam, postRecalFile),
analyzeCovariates(preRecalFile, preOutPath),
analyzeCovariates(postRecalFile, postOutPath))
cohortList :+= recalBam
}
// output a BAM list with all the processed per sample files
val cohortFile = new File(qscript.outputDir + qscript.projectName + ".cohort.list")
add(writeList(cohortList, cohortFile))
}
// General arguments to all programs
@ -142,9 +184,9 @@ class dataProcessingV2 extends QScript {
override def inputBams = join
override def outputBam = joined
override def commandLine = super.commandLine + " CREATE_INDEX=true"
this.memoryLimit = Some(6)
this.jarFile = qscript.mergeBamJar
this.isIntermediate = true
this.analysisName = queueLogDir + outBam + ".joinBams"
this.jobName = queueLogDir + outBam + ".joinBams"
}
@ -155,6 +197,8 @@ class dataProcessingV2 extends QScript {
this.mismatchFraction = Some(0.0)
this.rodBind :+= RodBind("dbsnp", "VCF", dbSNP)
this.rodBind :+= RodBind("indels", "VCF", indels)
this.scatterCount = nContigs
this.analysisName = queueLogDir + outIntervals + ".target"
this.jobName = queueLogDir + outIntervals + ".target"
}
@ -168,6 +212,8 @@ class dataProcessingV2 extends QScript {
this.doNotUseSW = useSW
this.compress = Some(0)
this.U = Some(org.broadinstitute.sting.gatk.arguments.ValidationExclusion.TYPE.NO_READ_ORDER_VERIFICATION) // todo -- update this with the last consensus between Tim, Matt and Eric. This is ugly!
this.scatterCount = nContigs
this.analysisName = queueLogDir + outBam + ".clean"
this.jobName = queueLogDir + outBam + ".clean"
}
@ -182,15 +228,17 @@ class dataProcessingV2 extends QScript {
sortOrder = null
this.memoryLimit = Some(6)
this.jarFile = qscript.dedupJar
this.isIntermediate = true
this.analysisName = queueLogDir + outBam + ".dedup"
this.jobName = queueLogDir + outBam + ".dedup"
}
//todo -- add scatter gather capability (waiting for khalid's modifications to the queue base
// Base-quality recalibration, step 1: runs CountCovariates over one BAM,
// binding dbSNP so known sites are excluded, and writes the recal table.
case class cov (inBam: File, outRecalFile: File) extends CountCovariates with CommandLineGATKArgs {
this.rodBind :+= RodBind("dbsnp", "VCF", dbSNP)
this.covariate ++= List("ReadGroupCovariate", "QualityScoreCovariate", "CycleCovariate", "DinucCovariate")
this.input_file :+= inBam
this.recal_file = outRecalFile
// analysisName/jobName are routed into the hidden .qlog/ dir (see queueLogDir)
this.analysisName = queueLogDir + outRecalFile + ".covariates"
this.jobName = queueLogDir + outRecalFile + ".covariates"
}
@ -204,7 +252,9 @@ class dataProcessingV2 extends QScript {
else if (qscript.intervals != null) this.intervals :+= qscript.intervals
this.U = Some(org.broadinstitute.sting.gatk.arguments.ValidationExclusion.TYPE.NO_READ_ORDER_VERIFICATION) // todo -- update this with the last consensus between Tim, Matt and Eric. This is ugly!
this.index_output_bam_on_the_fly = Some(true)
this.analysisName = queueLogDir + outBam + ".recalibration"
this.jobName = queueLogDir + outBam + ".recalibration"
}
case class analyzeCovariates (inRecalFile: File, outPath: File) extends AnalyzeCovariates {
@ -212,12 +262,16 @@ class dataProcessingV2 extends QScript {
this.resources = qscript.R
this.recal_file = inRecalFile
this.output_dir = outPath.toString
this.analysisName = queueLogDir + inRecalFile + ".analyze_covariates"
this.jobName = queueLogDir + inRecalFile + ".analyze_covariates"
}
// Final step: writes the list of per-sample processed BAM paths to a single
// .list file so downstream tools can consume the whole cohort at once.
case class writeList(inBams: List[File], outBamList: File) extends ListWriterFunction {
this.inputFiles = inBams
this.listFile = outBamList
// analysisName/jobName are routed into the hidden .qlog/ dir (see queueLogDir)
this.analysisName = queueLogDir + outBamList + ".bamList"
this.jobName = queueLogDir + outBamList + ".bamList"
}
}