Your time has come, SSG.

Fare thee well.


git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@1799 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
ebanks 2009-10-08 20:27:56 +00:00
parent 8fdb8922b8
commit a9f3d46fa8
9 changed files with 78 additions and 269 deletions

View File

@ -1,17 +1,15 @@
package org.broadinstitute.sting.gatk.walkers.genotyper;
import org.broadinstitute.sting.utils.genotype.GenotypeWriter;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.apache.log4j.Logger;
import java.util.Set;
import java.util.List;
public class AllMAFsGenotypeCalculationModel extends GenotypeCalculationModel {
protected AllMAFsGenotypeCalculationModel() {}
public boolean calculateGenotype(RefMetaDataTracker tracker, char ref, AlignmentContext context, DiploidGenotypePriors priors) {
public List<GenotypeCall> calculateGenotype(RefMetaDataTracker tracker, char ref, AlignmentContext context, DiploidGenotypePriors priors) {
// This is just a stub...
// TODO --- implement me
@ -20,6 +18,6 @@ public class AllMAFsGenotypeCalculationModel extends GenotypeCalculationModel {
// MAFs (1-N) and choose the most likely one. We can probably be smart and
// avoid calculating MAFs that we know can't be the most likely...
return true;
return null;
}
}

View File

@ -8,9 +8,7 @@ import org.broadinstitute.sting.utils.ReadBackedPileup;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.genotype.DiploidGenotype;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.*;
public class EMGenotypeCalculationModel extends GenotypeCalculationModel {
@ -26,7 +24,7 @@ public class EMGenotypeCalculationModel extends GenotypeCalculationModel {
protected EMGenotypeCalculationModel() {}
public boolean calculateGenotype(RefMetaDataTracker tracker, char ref, AlignmentContext context, DiploidGenotypePriors priors) {
public List<GenotypeCall> calculateGenotype(RefMetaDataTracker tracker, char ref, AlignmentContext context, DiploidGenotypePriors priors) {
// keep track of the GenotypeLikelihoods for each sample, separated by strand
HashMap<String, UnifiedGenotypeLikelihoods> GLs = new HashMap<String, UnifiedGenotypeLikelihoods>();
@ -51,7 +49,7 @@ public class EMGenotypeCalculationModel extends GenotypeCalculationModel {
if ( offset == -1 ) {
// are there too many deletions in the pileup?
if ( ++deletionsInPile > maxDeletionsInPileup )
return false;
return null;
continue;
}
@ -84,7 +82,7 @@ public class EMGenotypeCalculationModel extends GenotypeCalculationModel {
UnifiedGenotypeLikelihoods UGL = GLs.get(sample);
// if there were no good bases, the likelihoods object wouldn't exist
if ( UGL == null )
return false;
return null;
callsMetrics.nCalledBases++;
UGL.setPriors(priors);
@ -99,10 +97,10 @@ public class EMGenotypeCalculationModel extends GenotypeCalculationModel {
callsMetrics.nNonConfidentCalls++;
}
}
return true;
return Arrays.asList(call);
}
callsMetrics.nCalledBases++;
callsMetrics.nCalledBases++;
// Next, we need to create initial allele frequencies.
// An intelligent guess would be the observed base ratios (plus some small number to account for sampling issues).
@ -186,8 +184,9 @@ public class EMGenotypeCalculationModel extends GenotypeCalculationModel {
*/
//return new GenotypeCall(context.getLocation(), ref,gl, pileup);
//out.addMultiSampleCall((Genotype)calls);
return true;
return null;
}
double[] getPosteriorWeightedFrequencies(Collection<UnifiedGenotypeLikelihoods> UGLs) {

View File

@ -92,10 +92,10 @@ public abstract class GenotypeCalculationModel implements Cloneable {
* @param context alignment context
* @param priors priors to use for GL
*
* @return true if valid locus
* @return list of calls
*/
public abstract boolean calculateGenotype(RefMetaDataTracker tracker,
char ref,
AlignmentContext context,
DiploidGenotypePriors priors);
public abstract List<GenotypeCall> calculateGenotype(RefMetaDataTracker tracker,
char ref,
AlignmentContext context,
DiploidGenotypePriors priors);
}

View File

@ -1,193 +0,0 @@
package org.broadinstitute.sting.gatk.walkers.genotyper;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.filters.ZeroMappingQualityReadFilter;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
import org.broadinstitute.sting.gatk.walkers.ReadFilters;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.ReadBackedPileup;
import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.genotype.Genotype;
import org.broadinstitute.sting.utils.genotype.GenotypeWriter;
import org.broadinstitute.sting.utils.genotype.GenotypeWriterFactory;
import java.io.File;
import java.util.Arrays;
/**
 * Single-sample genotyper walker: at each locus it builds a GenotypeLikelihoods
 * object from the read pileup (map) and hands the resulting GenotypeCall to
 * reduce(), which writes calls whose confidence passes the LOD threshold.
 * NOTE(review): in this commit the file is being DELETED (hunk @ -1,193 +0,0) —
 * callers are migrated to UnifiedGenotyper; content preserved verbatim.
 */
@ReadFilters(ZeroMappingQualityReadFilter.class)
public class SingleSampleGenotyper extends LocusWalker<GenotypeCall, SingleSampleGenotyper.CallResult> {
// Control output settings
@Argument(fullName = "variants_out", shortName = "varout", doc = "File to which variants should be written", required = false)
public File VARIANTS_FILE = null;
@Argument(fullName = "variant_output_format", shortName = "vf", doc = "File format to be used", required = false)
public GenotypeWriterFactory.GENOTYPE_FORMAT VAR_FORMAT = GenotypeWriterFactory.GENOTYPE_FORMAT.GELI;
// Control what goes into the variants file and what format that file should have
@Argument(fullName = "lod_threshold", shortName = "lod", doc = "The lod threshold on which variants should be filtered", required = false)
public double LOD_THRESHOLD = Double.MIN_VALUE;
// NOTE(review): Double.MIN_VALUE is the smallest POSITIVE double, not the most
// negative value — as a "no filtering" default it still rejects confidence 0;
// -Double.MAX_VALUE was likely intended. Left unchanged (file is being deleted).
@Argument(fullName = "genotype", shortName = "genotype", doc = "Should we output confident genotypes or just the variants?", required = false)
public boolean GENOTYPE = false;
// Control how the genotype hypotheses are weighed
@Argument(fullName = "heterozygosity", shortName = "hets", doc = "Heterozygosity value used to compute prior likelihoods for any locus", required = false)
public Double heterozygosity = DiploidGenotypePriors.HUMAN_HETEROZYGOSITY;
@Argument(fullName = "baseModel", shortName = "m", doc = "Base substitution model to employ -- EMPIRICAL is the recommended default, but it's possible to select the ONE_STATE and THREE_STATE models for comparison purposes", required = false)
public BaseMismatchModel baseModel = BaseMismatchModel.EMPIRICAL;
@Argument(fullName = "verbose", shortName = "v", doc = "EXPERIMENTAL", required = false)
public boolean VERBOSE = false;
@Argument(fullName = "platform", shortName = "pl", doc = "Causes the genotyper to assume that reads without PL header TAG are this platform. Defaults to null, indicating that the system will throw a runtime exception when such reads are detected", required = false)
public EmpiricalSubstitutionGenotypeLikelihoods.SequencerPlatform defaultPlatform = null;
@Argument(fullName = "disableCache", doc = "[ADVANCED] If true, we won't use the caching system. This argument is for testing purposes only", required = false)
public boolean disableCache = false;
@Argument(fullName = "sample_name", shortName = "sn", doc = "What the label should be for the emited call track. Used by some genotype formats", required = false)
public String SAMPLE_NAME = null;
// Reduce-side accumulator: per-run call counters plus the shared output writer.
public class CallResult {
long nConfidentCalls = 0;
long nNonConfidentCalls = 0;
long nCalledBases = 0;
GenotypeWriter writer;
CallResult(GenotypeWriter writer) {
this.writer = writer;
}
public String toString() {
return String.format("SSG: %d confident and %d non-confident calls were made at %d bases",
nConfidentCalls, nNonConfidentCalls, nCalledBases);
}
}
/**
 * Filter out loci to ignore (at an ambiguous base in the reference or a locus with zero coverage).
 *
 * @param tracker the meta data tracker
 * @param ref the reference base
 * @param context contextual information around the locus
 *
 * @return true if we should look at this locus, false otherwise
 */
public boolean filter(RefMetaDataTracker tracker, char ref, AlignmentContext context) {
return (BaseUtils.simpleBaseToBaseIndex(ref) != -1 && context.getReads().size() != 0);
}
/** Initialize the walker with some sensible defaults */
public void initialize() {
// No explicit sample label: use the lone BAM's filename, or a generic label
// when reads come from multiple BAM files.
if (SAMPLE_NAME == null) {
if (GenomeAnalysisEngine.instance.getArguments().samFiles.size() == 1) {
SAMPLE_NAME = GenomeAnalysisEngine.instance.getArguments().samFiles.get(0).getName();
} else {
SAMPLE_NAME = "Combined_BAM";
}
}
}
/**
 * Compute at a given locus.
 *
 * @param tracker the meta data tracker
 * @param refContext the reference base
 * @param context contextual information around the locus
 */
// Returns a GenotypeCall built from the locus pileup, or null when the
// reference base is not a regular base (A/C/G/T) so the locus is skipped.
public GenotypeCall map(RefMetaDataTracker tracker, ReferenceContext refContext, AlignmentContext context) {
char ref = refContext.getBase();
if (BaseUtils.isRegularBase(ref)) {
DiploidGenotypePriors priors = new DiploidGenotypePriors(ref, heterozygosity, DiploidGenotypePriors.PROB_OF_TRISTATE_GENOTYPE);
// setup GenotypeLike object
GenotypeLikelihoods gl = GenotypeLikelihoodsFactory.makeGenotypeLikelihoods(baseModel, priors, defaultPlatform);
gl.setVerbose(VERBOSE);
gl.setEnableCacheFlag(!disableCache);
ReadBackedPileup pileup = new ReadBackedPileup(ref, context);
gl.add(pileup, true);
gl.validate();
return new GenotypeCall(SAMPLE_NAME,context.getLocation(), ref, gl, pileup);
} else {
return null;
}
}
/**
 * Determine whether we're at a Hapmap site
 *
 * @param tracker the meta data tracker
 *
 * @return true if we're at a Hapmap site, false if otherwise
 */
// NOTE(review): unused within this class as shown — presumably kept for
// subclasses or debugging; verify before relying on it.
private boolean isHapmapSite(RefMetaDataTracker tracker) {
return tracker.lookup("hapmap", null) != null;
}
/**
 * Determine whether we're at a dbSNP site
 *
 * @param tracker the meta data tracker
 *
 * @return true if we're at a dbSNP site, false if otherwise
 */
// NOTE(review): also unused within this class as shown.
private boolean isDbSNPSite(RefMetaDataTracker tracker) {
return tracker.lookup("dbsnp", null) != null;
}
/**
 * Initialize values appropriately for the reduce step.
 *
 * @return an empty string
 */
// NOTE(review): javadoc is stale — this returns a CallResult wrapping either a
// file-backed or stdout-backed GenotypeWriter, not a string.
public CallResult reduceInit() {
if (VARIANTS_FILE != null)
return new CallResult(GenotypeWriterFactory.create(VAR_FORMAT,
GenomeAnalysisEngine.instance.getSAMFileHeader(),
VARIANTS_FILE,
"SingleSampleGenotyper",
this.getToolkit().getArguments().referenceFile.getName()));
else
return new CallResult(GenotypeWriterFactory.create(VAR_FORMAT, GenomeAnalysisEngine.instance.getSAMFileHeader(), out));
}
/**
 * If we've found a LOD variant or callable genotype, output it to disk.
 *
 * @param call an GenotypeCall object for the variant.
 * @param sum accumulator for the reduce.
 *
 * @return an empty string
 */
// NOTE(review): javadoc is stale — returns the updated CallResult accumulator.
public CallResult reduce(GenotypeCall call, CallResult sum) {
// NOTE(review): nCalledBases is incremented even when call == null (locus
// skipped by map), so it counts visited loci rather than actual calls.
sum.nCalledBases++;
if (call != null && (GENOTYPE || call.isVariant(call.getReference()))) {
// In genotyping mode confidence is the genotype error; otherwise the
// variant-call error from the Variation view of the call.
double confidence = (GENOTYPE) ? call.getNegLog10PError() : call.toVariation().getNegLog10PError();
if (confidence >= LOD_THRESHOLD) {
sum.nConfidentCalls++;
//System.out.printf("Call %s%n", call);
if (sum.writer.supportsMulitSample()) {
sum.writer.addMultiSampleCall(Arrays.asList((Genotype)call));
} else {
sum.writer.addGenotypeCall(call);
}
} else
sum.nNonConfidentCalls++;
}
return sum;
}
/** Close the variant file. */
public void onTraversalDone(CallResult sum) {
sum.writer.close();
}
}

View File

@ -47,7 +47,7 @@ import java.util.List;
@ReadFilters({ZeroMappingQualityReadFilter.class, MissingReadGroupFilter.class})
public class UnifiedGenotyper extends LocusWalker<Integer, Integer> {
public class UnifiedGenotyper extends LocusWalker<List<GenotypeCall>, Integer> {
@ArgumentCollection private UnifiedArgumentCollection UAC = new UnifiedArgumentCollection();
@ -113,17 +113,17 @@ public class UnifiedGenotyper extends LocusWalker<Integer, Integer> {
* @param refContext the reference base
* @param context contextual information around the locus
*/
public Integer map(RefMetaDataTracker tracker, ReferenceContext refContext, AlignmentContext context) {
public List<GenotypeCall> map(RefMetaDataTracker tracker, ReferenceContext refContext, AlignmentContext context) {
char ref = Character.toUpperCase(refContext.getBase());
if ( !BaseUtils.isRegularBase(ref) )
return 0;
return null;
// an optimization to speed things up when overly covered
if ( UAC.MAX_READS_IN_PILEUP > 0 && context.getReads().size() > UAC.MAX_READS_IN_PILEUP )
return 0;
return null;
DiploidGenotypePriors priors = new DiploidGenotypePriors(ref, UAC.heterozygosity, DiploidGenotypePriors.PROB_OF_TRISTATE_GENOTYPE);
return ( gcm.calculateGenotype(tracker, ref, context, priors) ? 1 : 0 );
return gcm.calculateGenotype(tracker, ref, context, priors);
}
/**
@ -150,8 +150,8 @@ public class UnifiedGenotyper extends LocusWalker<Integer, Integer> {
public Integer reduceInit() { return 0; }
public Integer reduce(Integer value, Integer sum) {
return sum + value;
public Integer reduce(List<GenotypeCall> value, Integer sum) {
return sum + 1;
}
/** Close the variant file. */

View File

@ -7,7 +7,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.RodGenotypeChipAsGFF;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
import org.broadinstitute.sting.gatk.walkers.genotyper.GenotypeCall;
import org.broadinstitute.sting.gatk.walkers.genotyper.SingleSampleGenotyper;
import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyper;
import org.broadinstitute.sting.utils.genotype.DiploidGenotype;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.ListUtils;
@ -36,11 +36,11 @@ public class CoverageEvalWalker extends LocusWalker<List<String>, String> {
@Argument(fullName="max_coverage", shortName="maxcov", doc="Maximum coverage to downsample to", required=false) public int max_coverage=Integer.MAX_VALUE;
@Argument(fullName="downsampling_repeats", shortName="repeat", doc="Number of times to repeat downsampling at each coverage level", required=false) public int downsampling_repeats=1;
SingleSampleGenotyper SSG;
UnifiedGenotyper UG;
public void initialize() {
SSG = new SingleSampleGenotyper();
SSG.initialize();
UG = new UnifiedGenotyper();
UG.initialize();
String header = "#Sequence Position ReferenceBase NumberOfReads MaxMappingQuality BestGenotype BtrLod BtnbLod AA AC AG AT CC CG CT GG GT TT";
out.println("DownsampledCoverage\tAvailableCoverage\tHapmapChipGenotype\tGenotypeCallType\t"+header.substring(1));
@ -80,10 +80,10 @@ public class CoverageEvalWalker extends LocusWalker<List<String>, String> {
List<Integer> sub_offsets = ListUtils.sliceListByIndices(subset_indices, offsets);
AlignmentContext subContext = new AlignmentContext(context.getLocation(), sub_reads, sub_offsets);
GenotypeCall call = SSG.map(tracker, ref, subContext);
String callType = (call.isVariant(call.getReference())) ? ((call.isHom()) ? "HomozygousSNP" : "HeterozygousSNP") : "HomozygousReference";
if (call != null) {
List<GenotypeCall> calls = UG.map(tracker, ref, subContext);
if (calls != null) {
GenotypeCall call = calls.get(0);
String callType = (call.isVariant(call.getReference())) ? ((call.isHom()) ? "HomozygousSNP" : "HeterozygousSNP") : "HomozygousReference";
GenotypeCalls.add(coverage+"\t"+coverage_available+"\t"+hc_genotype+"\t"+callType+"\t"+toGeliString(call));
}
}

View File

@ -8,7 +8,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.VariationRod;
import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.gatk.walkers.genotyper.GenotypeCall;
import org.broadinstitute.sting.gatk.walkers.genotyper.SingleSampleGenotyper;
import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyper;
import org.broadinstitute.sting.utils.genotype.Variation;
import java.util.ArrayList;
@ -30,12 +30,12 @@ import java.util.Set;
public class DeNovoSNPWalker extends RefWalker<String, Integer>{
SingleSampleGenotyper SSG;
UnifiedGenotyper UG;
private List<Set<String>> readGroupSets;
public void initialize() {
SSG = new SingleSampleGenotyper();
SSG.initialize();
UG = new UnifiedGenotyper();
UG.initialize();
readGroupSets = getToolkit().getMergedReadGroupsByReaders();
}
@ -70,34 +70,38 @@ public class DeNovoSNPWalker extends RefWalker<String, Integer>{
}
AlignmentContext parent1_subContext = new AlignmentContext(context.getLocation(), parent1_reads, parent1_offsets);
GenotypeCall parent1 = SSG.map(tracker, ref, parent1_subContext);
List<GenotypeCall> parent1 = UG.map(tracker, ref, parent1_subContext);
AlignmentContext parent2_subContext = new AlignmentContext(context.getLocation(), parent2_reads, parent2_offsets);
GenotypeCall parent2 = SSG.map(tracker, ref, parent2_subContext);
List<GenotypeCall> parent2 = UG.map(tracker, ref, parent2_subContext);
if (!parent1.isVariant(parent1.getReference()) &&
parent1.getNegLog10PError() > 5 &&
!parent2.isVariant(parent2.getReference()) &&
parent2.getNegLog10PError() > 5
) {
if ( parent1 != null && parent2 != null ) {
GenotypeCall parent1call = parent1.get(0);
GenotypeCall parent2call = parent2.get(0);
double sumConfidences = 0.5 * (0.5 * child.getNegLog10PError() +
Math.min(parent1.getNegLog10PError(), parent2.getNegLog10PError()));
if (!parent1call.isVariant(parent1call.getReference()) &&
parent1call.getNegLog10PError() > 5 &&
!parent2call.isVariant(parent2call.getReference()) &&
parent2call.getNegLog10PError() > 5) {
out.format("%s\t", child.getLocation().getContig());
out.format("%s\t", child.getLocation().getStart());
out.format("%.4f\t", sumConfidences);
out.format("%.4f\t", child.getNegLog10PError());
out.format("%.4f\t", parent1.getNegLog10PError());
out.format("%.4f\t", parent2.getNegLog10PError());
out.format("%s\t", dbsnp != null);
double sumConfidences = 0.5 * (0.5 * child.getNegLog10PError() +
Math.min(parent1call.getNegLog10PError(), parent2call.getNegLog10PError()));
out.format ("%s\t", child.toString());
out.format ("%s\t", parent1.toString());
out.format ("%s", parent2.toString());
if (dbsnp != null)
out.format ("\tDBSNP\t:%s", dbsnp.toString());
out.println();
out.format("%s\t", child.getLocation().getContig());
out.format("%s\t", child.getLocation().getStart());
out.format("%.4f\t", sumConfidences);
out.format("%.4f\t", child.getNegLog10PError());
out.format("%.4f\t", parent1call.getNegLog10PError());
out.format("%.4f\t", parent2call.getNegLog10PError());
out.format("%s\t", dbsnp != null);
out.format ("%s\t", child.toString());
out.format ("%s\t", parent1.toString());
out.format ("%s", parent2.toString());
if (dbsnp != null)
out.format ("\tDBSNP\t:%s", dbsnp.toString());
out.println();
}
}
}
}

View File

@ -1,7 +1,7 @@
package org.broadinstitute.sting.playground.gatk.walkers.poolseq;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
import org.broadinstitute.sting.gatk.walkers.genotyper.SingleSampleGenotyper;
import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyper;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
@ -39,11 +39,11 @@ public class ArtificialPoolWalker extends LocusWalker<ArtificialPoolContext, Art
public ArtificialPoolContext reduceInit() { // try to initialize the file writer
ArtificialPoolContext apContext = new ArtificialPoolContext();
apContext.setSingleSampleGenotyper(new SingleSampleGenotyper());
apContext.setSingleSampleGenotyper(new UnifiedGenotyper());
apContext.setReadGroupSets(getToolkit().getMergedReadGroupsByReaders());
apContext.setAuxWriter(initializeAuxFileWriter(apContext.getTotalNumberOfPeople()));
apContext.setSAMFileWriter(outputBamFile);
apContext.initializeSSG();
apContext.initializeUG();
return apContext;
}

View File

@ -5,7 +5,7 @@ import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.genotyper.SingleSampleGenotyper;
import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyper;
import org.broadinstitute.sting.utils.Pair;
import org.broadinstitute.sting.utils.genotype.Genotype;
@ -25,7 +25,7 @@ import java.util.Set;
public class ArtificialPoolContext {
private PrintWriter writerToAuxiliaryFile;
private SAMFileWriter writerToSamFile;
private SingleSampleGenotyper ssg;
private UnifiedGenotyper ug;
private List<Set<String>> readGroupSets;
private long[] runningCoverage;
private RefMetaDataTracker refTracker;
@ -36,7 +36,7 @@ public class ArtificialPoolContext {
readGroupSets = null;
writerToAuxiliaryFile = null;
writerToSamFile = null;
ssg = null;
ug = null;
refTracker = null;
aliContext = null;
refContext = null;
@ -50,14 +50,14 @@ public class ArtificialPoolContext {
readGroupSets = null;
writerToAuxiliaryFile = null;
writerToSamFile=null;
ssg = null;
ug = null;
runningCoverage = null;
}
public ArtificialPoolContext(PrintWriter pw, SAMFileWriter sw, SingleSampleGenotyper g, List<Set<String>> rgs, long [] runcvg, RefMetaDataTracker rt, ReferenceContext rc, AlignmentContext ac) {
public ArtificialPoolContext(PrintWriter pw, SAMFileWriter sw, UnifiedGenotyper g, List<Set<String>> rgs, long [] runcvg, RefMetaDataTracker rt, ReferenceContext rc, AlignmentContext ac) {
writerToAuxiliaryFile = pw;
writerToSamFile = sw;
ssg = g;
ug = g;
readGroupSets = rgs;
runningCoverage = runcvg;
refTracker = rt;
@ -69,12 +69,12 @@ public class ArtificialPoolContext {
writerToAuxiliaryFile = writer;
}
public void setSingleSampleGenotyper(SingleSampleGenotyper typer) {
ssg = typer;
public void setSingleSampleGenotyper(UnifiedGenotyper typer) {
ug = typer;
}
public void initializeSSG() {
ssg.initialize();
public void initializeUG() {
ug.initialize();
}
public void setReadGroupSets(List<Set<String>> rgSets) {
@ -121,8 +121,8 @@ public class ArtificialPoolContext {
return writerToAuxiliaryFile;
}
public SingleSampleGenotyper getSingleSampleGenotyper() {
return ssg;
public UnifiedGenotyper getSingleSampleGenotyper() {
return ug;
}
public List<Set<String>> getReadGroupSets() {
@ -257,8 +257,9 @@ public class ArtificialPoolContext {
public Genotype getGenotype(int group) {
AlignmentContext alicon = this.getAlignmentContext();
Pair<List<SAMRecord>[],List<Integer>[]> byGroupSplitPair = this.splitByGroup(alicon.getReads(),alicon.getOffsets());
return ssg.map(this.getRefMetaDataTracker(),this.getReferenceContext(),
List<? extends Genotype> result = ug.map(this.getRefMetaDataTracker(),this.getReferenceContext(),
new AlignmentContext(this.getAlignmentContext().getLocation(), byGroupSplitPair.first[group],byGroupSplitPair.second[group]));
return (result == null ? null : result.get(0));
}
public static List<SAMRecord>[] sampleReads(List<SAMRecord>[] reads, double[] propEstGlobal) {