Removing deprecated code and walkers for which I had the green light from the repository.

Moved piecemealannotator and secondarybases to archive.



git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@2195 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
ebanks 2009-12-01 05:58:20 +00:00
parent 2c16c18a04
commit 084337087e
38 changed files with 0 additions and 1104 deletions

View File

@ -1,46 +0,0 @@
package org.broadinstitute.sting.playground.gatk.walkers;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.walkers.ReadWalker;
import org.broadinstitute.sting.gatk.walkers.WalkerName;
@WalkerName("Base_Quality_Dump")
public class BaseQualityDumpWalker extends ReadWalker<Integer, Integer> {
    /** Inclusive lower bound on the NM (edit distance) tag for a read to be dumped. */
    protected final int MIN_TARGET_EDIT_DISTANCE = 0;
    /** Inclusive upper bound on the NM (edit distance) tag for a read to be dumped. */
    protected final int MAX_TARGET_EDIT_DISTANCE = 4;

    /**
     * Restrict the traversal to aligned reads only.
     */
    public boolean filter(char[] ref, SAMRecord read) {
        return !read.getReadUnmappedFlag();
    }

    /**
     * For each gap-free read whose edit distance falls inside the target
     * window, print its Phred base qualities (read-direction order, i.e.
     * reversed for negative-strand alignments) as one space-separated line.
     *
     * @return always 1, so the reduction counts emitted reads
     */
    public Integer map(char[] ref, SAMRecord read) {
        final int editDistance = Integer.parseInt(read.getAttribute("NM").toString());
        final boolean gapFree = read.getAlignmentBlocks().size() == 1; // ignore alignments with indels for now
        if ( gapFree
                && editDistance >= MIN_TARGET_EDIT_DISTANCE
                && editDistance <= MAX_TARGET_EDIT_DISTANCE ) {
            final String quals = read.getBaseQualityString();
            final int n = quals.length();
            final int[] phred = new int[n];
            final boolean onReverseStrand = read.getReadNegativeStrandFlag();
            for ( int i = 0; i < n; i++ ) {
                // Reverse-strand reads are flipped back into sequencing order.
                final int target = onReverseStrand ? (n - 1 - i) : i;
                phred[target] += (int) quals.charAt(i) - 33; // FASTQ ASCII offset
            }
            for ( final int q : phred )
                out.print(q + " ");
            out.println("");
        }
        return 1;
    }

    /** Start the read count at zero. */
    public Integer reduceInit() { return 0; }

    /** Accumulate the per-read counts. */
    public Integer reduce(Integer value, Integer sum) {
        return sum + value;
    }
}

View File

@ -1,60 +0,0 @@
package org.broadinstitute.sting.playground.gatk.walkers;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.walkers.ReadWalker;
import org.broadinstitute.sting.gatk.walkers.WalkerName;
/**
 * Accumulates a histogram of per-base Phred quality scores across all reads
 * and prints one "quality : count" row per bin (up to the highest observed
 * quality) when the traversal finishes.
 */
@WalkerName("Base_Quality_Histogram")
public class BaseQualityHistoWalker extends ReadWalker<Integer, Integer> {
    // Histogram: index = Phred base quality, value = number of observations.
    long[] qualCounts = new long[100];

    public void initialize() {
        // Explicitly zero the histogram (Java zero-fills new arrays, but this
        // keeps the walker correct even if initialize() is invoked again).
        for ( int i = 0; i < this.qualCounts.length; i++ ) {
            this.qualCounts[i] = 0;
        }
    }

    /** Keep every read -- each contributes its base qualities. */
    public boolean filter(char[] ref, SAMRecord read) {
        return true;
    }

    /**
     * Tally every base quality of the read into the histogram.
     *
     * @return always 1, so the reduction counts processed reads
     * @throws RuntimeException if a quality is outside the histogram range
     */
    public Integer map(char[] ref, SAMRecord read) {
        for ( byte qual : read.getBaseQualities() ) {
            // BUGFIX: valid indices are 0..qualCounts.length-1. The original
            // check ("qual > 100") let a quality of exactly 100 through and
            // crashed with ArrayIndexOutOfBoundsException below instead of
            // raising the intended diagnostic.
            if ( qual < 0 || qual >= this.qualCounts.length ) {
                throw new RuntimeException(String.format("Invalid base quality detected -- %d at %s%n", qual, read.getReadName()));
            }
            this.qualCounts[qual]++;
        }
        return 1;
    }

    /** Start the read count at zero. */
    public Integer reduceInit() { return 0; }

    /** Accumulate the per-read counts. */
    public Integer reduce(Integer value, Integer sum) {
        return value + sum;
    }

    /** Print "quality : count" rows up to (and including) the last non-zero bin. */
    public void onTraversalDone(Integer result) {
        int lastNonZero = -1;
        for ( int i = this.qualCounts.length-1; i >= 0; i-- ) {
            if ( this.qualCounts[i] > 0 ) {
                lastNonZero = i;
                break;
            }
        }
        for ( int i = 0; i < lastNonZero+1; i++ ) {
            out.printf("%3d : %10d%n", i, this.qualCounts[i]);
        }
    }
}

View File

@ -1,57 +0,0 @@
package org.broadinstitute.sting.playground.gatk.walkers;
import org.broadinstitute.sting.gatk.walkers.ReadWalker;
import org.broadinstitute.sting.utils.QualityUtils;
import net.sf.samtools.SAMRecord;
/**
 * Debugging walker: for each read carrying a compressed four-base-probability
 * "SQ" attribute, prints the positions at which the SQ-implied best base
 * agrees with the called base.
 *
 * NOTE(review): a read without an SQ attribute makes sq null and NPEs at
 * sq.length below -- presumably this walker is only run on data known to
 * carry SQ tags; confirm.
 */
public class DisplayFourBaseReadWalker extends ReadWalker<Integer, Integer> {
    public Integer map(char[] ref, SAMRecord read) {
        String bases = read.getReadString();
        boolean displayed = false; // true once the read's base string has been echoed
        byte[] sq = (byte[]) read.getAttribute("SQ");
        // Hard-coded read of interest: dump its full SAM record for inspection.
        if (read.getReadName().equalsIgnoreCase("30JJE.5.24197751")) {
            System.out.println(read.format());
        }
        for (int i = 0; i < sq.length; i++) {
            // Decode the base index packed into the compressed quality byte.
            int baseIndex = QualityUtils.compressedQualityToBaseIndex(sq[i]);
            char base = '.'; // '.' = index did not decode to A/C/G/T
            switch (baseIndex) {
                case 0: base = 'A'; break;
                case 1: base = 'C'; break;
                case 2: base = 'G'; break;
                case 3: base = 'T'; break;
            }
            if (base == bases.charAt(i)) {
                // First agreement: print the read's bases once as a header line.
                if (!displayed) {
                    System.out.println(bases);
                    displayed = true;
                }
                System.out.print(base);
            }
            else {
                // Pad to keep columns aligned with the header line.
                // NOTE(review): positions before the first match get no padding,
                // so the alignment can be shifted -- confirm this is intended.
                if (displayed) {
                    System.out.print(" ");
                }
            }
        }
        if (displayed) {
            System.out.print("\n");
        }
        return 0;
    }

    public Integer reduceInit() {
        return 0;
    }

    public Integer reduce(Integer value, Integer sum) {
        // The reduction result is unused; always zero.
        return 0;
    }
}

View File

@ -1,52 +0,0 @@
package org.broadinstitute.sting.playground.gatk.walkers;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
import org.broadinstitute.sting.gatk.walkers.TreeReducible;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.BasicPileup;
/**
 * Counts the pileup entries carrying an indel at every locus whose reference
 * base is a regular base, and sums those counts over the traversal.
 */
public class IndelCounterWalker extends LocusWalker<Integer,Integer> implements TreeReducible<Integer> {
    /** Start the running indel count at zero. */
    public Integer reduceInit() {
        return 0;
    }

    /** Fold one locus's count into the running total. */
    public Integer reduce( Integer prevReduce, Integer map ) {
        return prevReduce + map;
    }

    /** Tree reduction is plain addition, so it reuses reduce(). */
    public Integer treeReduce(Integer lhs, Integer rhs) {
        return reduce(lhs,rhs);
    }

    /** Count indels at this locus, or 0 when the reference base is irregular. */
    public Integer map ( RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context ) {
        return BaseUtils.isRegularBase(ref.getBase()) ? numIndels(context) : 0;
    }

    /** Number of pileup entries at this locus that represent an indel. */
    public Integer numIndels ( AlignmentContext context ) {
        final String[] pileup = BasicPileup.indelPileup(context.getReads(), context.getOffsets());
        int indelCount = 0;
        for ( final String entry : pileup ) {
            // Non-indel bases appear as the literal string "null" in the pileup.
            if ( !entry.equals("null") ) {
                indelCount++;
            }
        }
        return indelCount;
    }
}

View File

@ -1,142 +0,0 @@
package org.broadinstitute.sting.playground.gatk.walkers.Recalibration;
import org.broadinstitute.sting.gatk.refdata.rodDbSNP;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.RMD;
import org.broadinstitute.sting.gatk.walkers.DataSource;
import org.broadinstitute.sting.gatk.walkers.Requires;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.sting.utils.Pair;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.genotype.Variation;
import org.broadinstitute.sting.utils.cmdLine.Argument;
import java.util.List;
import java.util.ArrayList;
import net.sf.samtools.SAMRecord;
/**
 * Collects, at every non-dbSNP reference site, the (reported quality,
 * minimum neighborhood quality) pair for each pileup base -- separately for
 * bases matching and mismatching the reference -- and finally prints, per
 * (Q, NQS) bin, the observation count, mismatch rate, empirical quality and
 * its difference from the reported quality.
 */
@Requires(value={DataSource.REFERENCE}, referenceMetaData = {@RMD(name="dbsnp",type= rodDbSNP.class)})
public class MinimumNQSWalker extends LocusWalker<Pair<List<Pair<Integer,Integer>>,List<Pair<Integer,Integer>>>, int[][][]> {
    // Index of the mismatch counter in the innermost count array.
    private static int MM_OFFSET = 1;
    // Index of the match counter in the innermost count array.
    private static int MATCH_OFFSET = 0;
    // One past the largest quality score binned by this walker.
    private static int QSCORE_MAX = 1 + QualityUtils.MAX_REASONABLE_Q_SCORE;

    @Argument(fullName="windowSize", shortName="ws", doc="Size of the window (in one direction)", required=true)
    private int winSide = 4;

    /** Print the table header before traversal begins. */
    public void initialize() {
        out.printf("%s%n", makeHeader());
    }

    /** Zeroed [reported Q][neighborhood Q][match|mismatch] count cube. */
    public int[][][] reduceInit() {
        int[][][] counts = new int[QSCORE_MAX][QSCORE_MAX][2];
        for ( int i = 0; i < QSCORE_MAX; i ++ ) {
            for ( int j = 0; j < QSCORE_MAX; j ++ ) {
                counts[i][j][1]=0;
                counts[i][j][0]=0;
            }
        }
        return counts;
    }

    /** Fold one locus's (Q, NQS) pairs into the running count cube. */
    public int[][][] reduce(Pair<List<Pair<Integer,Integer>>,List<Pair<Integer,Integer>>> map, int[][][] prevReduce) {
        if ( map != null ) { // map is null at dbSNP sites, which are skipped
            List<Pair<Integer,Integer>> matchingQualityNQSPairs = map.getFirst();
            List<Pair<Integer,Integer>> mismatchingQualityNQSPairs = map.getSecond();
            for ( Pair<Integer,Integer> p : matchingQualityNQSPairs ) {
                prevReduce[p.getFirst()][p.getSecond()][MATCH_OFFSET] ++;
            }
            for ( Pair<Integer,Integer> p : mismatchingQualityNQSPairs ) {
                prevReduce[p.getFirst()][p.getSecond()][MM_OFFSET] ++;
            }
        }
        return prevReduce;
    }

    /**
     * For a non-dbSNP site, pair each pileup base's reported quality with the
     * minimum quality in its neighborhood, split by match/mismatch against
     * the reference base.
     *
     * @return the (matching, mismatching) pair lists, or null at dbSNP sites
     */
    public Pair<List<Pair<Integer,Integer>>,List<Pair<Integer,Integer>>> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
        ArrayList<Pair<Integer,Integer>> matchingQualityNQSPairs = new ArrayList<Pair<Integer,Integer>>();
        ArrayList<Pair<Integer,Integer>> mismatchingQualityNQSPairs = new ArrayList<Pair<Integer,Integer>>();
        // Only process loci with no dbSNP record (known variants are skipped).
        if ( (Variation) tracker.lookup("dbsnp",null) == null ) {
            for ( int r = 0; r < context.size(); r ++ ) {
                SAMRecord read = context.getReads().get(r);
                int offset = context.getOffsets().get(r);
                int quality = read.getBaseQualities()[offset];
                int NQS = getNghdQ(offset, read);
                Pair<Integer,Integer> qualityNQSPair = new Pair<Integer,Integer> (quality,NQS);
                if ( BaseUtils.basesAreEqual(read.getReadBases()[offset], (byte) ref.getBase()) ) {
                    matchingQualityNQSPairs.add(qualityNQSPair);
                } else {
                    mismatchingQualityNQSPairs.add(qualityNQSPair);
                }
            }
            return new Pair<List<Pair<Integer,Integer>>,List<Pair<Integer,Integer>>>(matchingQualityNQSPairs,mismatchingQualityNQSPairs);
        } else {
            return null;
        }
    }

    /** Emit one data row per (reported Q, neighborhood Q) bin. */
    public void onTraversalDone( int[][][] reduce ) {
        for ( int qc = 0; qc < QSCORE_MAX; qc ++ ) {
            for ( int qn = 0; qn < QSCORE_MAX; qn ++ ) {
                out.printf("%s%n", formatData(reduce[qc][qn],qc,qn));
            }
        }
    }

    /**
     * Minimum base quality among the bases within winSide positions of off,
     * excluding position off itself (window clamped to the read boundaries).
     *
     * NOTE(review): if the window contains no other base (e.g. a 1bp read),
     * this returns Byte.MAX_VALUE (127), which exceeds QSCORE_MAX and would
     * overflow the count cube in reduce() -- confirm inputs preclude this.
     */
    public int getNghdQ(int off, SAMRecord read) {
        byte minQ = Byte.MAX_VALUE;
        byte[] quals = read.getBaseQualities();
        int rdlnth = read.getReadLength();
        int start;
        int end;
        // Clamp the window to [0, read length).
        if ( off - winSide < 0 ) {
            start = 0;
        } else {
            start = off - winSide;
        }
        if ( off + winSide > rdlnth ) {
            end = rdlnth;
        } else {
            end = off + winSide;
        }
        for ( int i = start; i < end; i ++ ) {
            if ( i != off ) {
                byte q = quals[i];
                if ( q < minQ ) {
                    minQ = q;
                }
            }
        }
        return minQ;
    }

    /** Tab-separated column headers for the output table. */
    private String makeHeader() {
        return String.format("%s\t%s\t%s\t%s\t%s\t%s","Reported_Q","Min_Nghd_Q","N_observations","Mm_rate","Empirical_Q","Q_diff");
    }

    /**
     * One output row: bin coordinates, observation count, mismatch rate,
     * empirical quality and its difference from the reported quality.
     * NOTE(review): counts == 0 yields a NaN mismatch rate -- confirm
     * downstream consumers tolerate that.
     */
    private String formatData( int[] mmArray, int qCenter, int qNghd ) {
        int counts = mmArray[MM_OFFSET]+mmArray[MATCH_OFFSET];
        double mismatch = ((double)mmArray[MM_OFFSET]/counts);
        byte qEmp = QualityUtils.probToQual(1-mismatch);
        return String.format("%d\t%d\t%d\t%f\t%d\t%d", qCenter, qNghd, counts, mismatch, qEmp,qEmp-qCenter);
    }
}

View File

@ -1,154 +0,0 @@
package org.broadinstitute.sting.playground.gatk.walkers.poolseq;
import org.broadinstitute.sting.gatk.walkers.LocusWalker;
import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyper;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import net.sf.samtools.SAMFileWriter;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.playground.utils.ArtificialPoolContext;
import org.broadinstitute.sting.utils.cmdLine.Argument;
import org.broadinstitute.sting.utils.StingException;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Set;
import java.util.List;
import java.util.ListIterator;
import java.util.LinkedList;
/**
 * Walker that pools reads from multiple input BAMs (one person per reader)
 * into a single artificial pool: reads are copied to an output BAM while
 * per-person genotype and coverage information goes to an auxiliary report.
 * The down-sampling pipeline is still under construction (see the
 * commented-out sketch in reduce()).
 */
public class ArtificialPoolWalker extends LocusWalker<ArtificialPoolContext, ArtificialPoolContext> {
    // BUGFIX: corrected the "genotyp" typo in the user-visible help text.
    @Argument(fullName = "AuxOutputFile", shortName = "af", doc = "Auxiliary file for genotype & coverage output", required = true)
    String auxFilePath = null;

    @Argument(fullName = "OutputBamFile", shortName = "of", doc = "Output to this file rather than standard output", required = false)
    SAMFileWriter outputBamFile = null;

    public void initialize() {
    }

    /** Builds the reduce-side carrier: genotyper, read groups, and writers. */
    public ArtificialPoolContext reduceInit() {
        ArtificialPoolContext apContext = new ArtificialPoolContext();
        apContext.setSingleSampleGenotyper(new UnifiedGenotyper());
        apContext.setReadGroupSets(getToolkit().getMergedReadGroupsByReaders());
        apContext.setAuxWriter(initializeAuxFileWriter(apContext.getTotalNumberOfPeople()));
        apContext.setSAMFileWriter(outputBamFile);
        apContext.initializeUG();
        return apContext;
    }

    /** Wraps the per-locus tracker/reference/alignment state. */
    public ArtificialPoolContext map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
        return new ArtificialPoolContext(tracker, ref, context);
    }

    /**
     * Currently a stub: copies every read at the locus to the output BAM and
     * writes a placeholder line to the auxiliary file. The commented-out code
     * below sketches the intended pooling/down-sampling pipeline.
     */
    public ArtificialPoolContext reduce(ArtificialPoolContext mapCon, ArtificialPoolContext redCon) {
        /* ArtificialPoolContext updatedContext = ArtificialPoolContext.mapReduceMerge(mapCon,redCon);
        List<SAMRecord>[] newReads = updatedContext.splitReadsByGroup(updatedContext.getNewReads());
        long[] newCvg = updateRunningCoverage(updatedContext.getRunningCoverage(), getCoverageByGroup(newReads));
        updatedContext.setRunningCoverage(newCvg);
        List<SAMRecord>[] sampledReads = ArtificialPoolContext.sampleReads(newReads,runningCoverageToDouble(newCvg));
        printToFiles(sampledReads,updatedContext);*/
        // BUGFIX: the locus data lives on the map-side context -- the reduce-side
        // carrier produced by reduceInit() has a null alignment context, so
        // reading it from redCon dereferenced null on the first reduction.
        AlignmentContext context = mapCon.getAlignmentContext();
        SAMFileWriter writer = redCon.getSAMFileWriter();
        for (SAMRecord read : context.getReads()) {
            writer.addAlignment(read);
        }
        PrintWriter auxWrite = redCon.getWriterToAuxiliaryFile();
        auxWrite.print("This is a test."); // placeholder output
        // Return the reduce-side carrier so the writers/genotyper persist.
        ArtificialPoolContext updatedContext = redCon;
        return updatedContext;
    }

    // Helper methods follow

    /**
     * Opens the auxiliary report and writes its header.
     *
     * @param nFiles number of people (one column group per person)
     * @throws StingException if the path cannot be opened or written
     */
    private PrintWriter initializeAuxFileWriter(int nFiles) {
        PrintWriter auxFileWriter;
        try {
            auxFileWriter = new PrintWriter(new FileOutputStream(auxFilePath));
            auxFileWriter.println(createAuxFileHeader(nFiles));
        } catch(FileNotFoundException e) {
            String errmsg = "The filepath you entered "+auxFilePath+" could not be opened. Please double-check that the input is correct.";
            throw new StingException(errmsg, e);
        } catch(IOException e) {
            String errmsg = "The file you entered "+auxFilePath+" could not be written to. Please check your permissions to write to this file.";
            throw new StingException(errmsg,e);
        }
        return auxFileWriter;
    }

    /** Header row: locus column, then genotype/confidence/coverage per person. */
    private String createAuxFileHeader(int nFiles) {
        String sp = " ";
        String st1 = "Chrom:Pos" + sp;
        String st2 = "";
        for(int j = 0; j < nFiles; j++) {
            st2 = st2 + "Pers " + j + " Gen" + sp; // short for "genotype of person j at this location"
            st2 = st2 + "Pers " + j + " Conf" + sp; // short for "confidence in genotype call of ..."
            st2 = st2 + "Pers " + j + " NewCvg" + sp; // short for "coverage of person j at this location"
        }
        String st3 = "TotalCvg";
        return st1+st2+st3;
    }

    /** Per-group read counts for a set of read lists already split by person. */
    private int[] getCoverageByGroup(List<SAMRecord>[] readsByGroup) {
        int[] coverage = new int[readsByGroup.length];
        for(int iterator = 0; iterator < readsByGroup.length; iterator ++) {
            coverage[iterator] = readsByGroup[iterator].size();
        }
        return coverage;
    }

    /** Element-wise sum of the running coverage and this locus's new coverage. */
    private long[] updateRunningCoverage(long[] cvgUpToNow, int[] newCvgByGroup) {
        long[] newCvg = new long[cvgUpToNow.length];
        for(int iter = 0; iter < cvgUpToNow.length; iter++) {
            newCvg[iter] = cvgUpToNow[iter] + newCvgByGroup[iter];
        }
        return newCvg;
    }

    /**
     * Converts per-group coverage counts into per-group proportions of the
     * total coverage.
     */
    private double[] runningCoverageToDouble(long[] cvg) {
        double[] avgProp = new double[cvg.length];
        long sum = 0;
        for(long elem : cvg) {
            sum += elem;
        }
        for(int iter = 0; iter < cvg.length; iter++) {
            // BUGFIX: cast before dividing; long/long division truncated every
            // proportion to 0 (or 1 for a group holding all the coverage).
            avgProp[iter] = (double) cvg[iter] / sum;
        }
        return avgProp;
    }

    /** Writes sampled reads to the BAM and per-person stats to the aux report. */
    private void printToFiles(List<SAMRecord>[] sampledNewReads, ArtificialPoolContext context) {
        SAMFileWriter samWrite = context.getSAMFileWriter();
        String sp = " ";
        PrintWriter auxWrite = context.getWriterToAuxiliaryFile();
        int readGroupInt = 0;
        for(List<SAMRecord> readGroup : sampledNewReads) {
            for(SAMRecord read : readGroup) {
                samWrite.addAlignment(read);
            }
            auxWrite.print(context.getAlignmentContext().getLocation().toString() + sp);
            auxWrite.print(context.genotypeAndConfidenceToString(readGroupInt,sp));
            readGroupInt++;
        }
    }
}

View File

@ -1,285 +0,0 @@
package org.broadinstitute.sting.playground.utils;
import net.sf.samtools.SAMFileWriter;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.genotyper.*;
import org.broadinstitute.sting.utils.Pair;
import org.broadinstitute.sting.utils.genotype.*;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
 * Carrier object for the state threaded through ArtificialPoolWalker's
 * map/reduce traversal: the per-locus contexts produced by map() plus the
 * writers, genotyper, read-group partition and running coverage owned by
 * the reduce side.
 */
public class ArtificialPoolContext {
    private PrintWriter writerToAuxiliaryFile;  // auxiliary genotype/coverage report
    private SAMFileWriter writerToSamFile;      // destination for pooled reads
    private UnifiedGenotyper ug;                // genotyper applied per read group
    private List<Set<String>> readGroupSets;    // read-group names, one set per person
    private long[] runningCoverage;             // cumulative coverage per read group
    private RefMetaDataTracker refTracker;
    private ReferenceContext refContext;
    private AlignmentContext aliContext;

    /** Empty context; all state must be supplied via the setters. */
    public ArtificialPoolContext() {
        readGroupSets = null;
        writerToAuxiliaryFile = null;
        writerToSamFile = null;
        ug = null;
        refTracker = null;
        aliContext = null;
        refContext = null;
        runningCoverage = null;
    }

    /** Per-locus context produced by map(); writer/genotyper state stays null. */
    public ArtificialPoolContext(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
        refTracker = tracker;
        refContext = ref;
        aliContext = context;
        readGroupSets = null;
        writerToAuxiliaryFile = null;
        writerToSamFile = null;
        ug = null;
        runningCoverage = null;
    }

    /** Fully-populated context. */
    public ArtificialPoolContext(PrintWriter pw, SAMFileWriter sw, UnifiedGenotyper g, List<Set<String>> rgs, long [] runcvg, RefMetaDataTracker rt, ReferenceContext rc, AlignmentContext ac) {
        writerToAuxiliaryFile = pw;
        writerToSamFile = sw;
        ug = g;
        readGroupSets = rgs;
        runningCoverage = runcvg;
        refTracker = rt;
        refContext = rc;
        aliContext = ac;
    }

    public void setAuxWriter(PrintWriter writer) {
        writerToAuxiliaryFile = writer;
    }

    public void setSingleSampleGenotyper(UnifiedGenotyper typer) {
        ug = typer;
    }

    /** Delegates initialization to the wrapped genotyper. */
    public void initializeUG() {
        ug.initialize();
    }

    public void setReadGroupSets(List<Set<String>> rgSets) {
        readGroupSets = rgSets;
    }

    public void setRefMetaDataTracker(RefMetaDataTracker tracker) {
        refTracker = tracker;
    }

    public void setReferenceContext(ReferenceContext ref) {
        refContext = ref;
    }

    public void setAlignmentContext(AlignmentContext context) {
        aliContext = context;
    }

    public void setRunningCoverage(long[] estimate) {
        runningCoverage = estimate;
    }

    public void setSAMFileWriter(SAMFileWriter writer) {
        writerToSamFile = writer;
    }

    /** Number of people, i.e. the number of read-group sets. */
    public int getTotalNumberOfPeople() {
        return readGroupSets.size();
    }

    public RefMetaDataTracker getRefMetaDataTracker() {
        return refTracker;
    }

    public ReferenceContext getReferenceContext() {
        return refContext;
    }

    public AlignmentContext getAlignmentContext() {
        return aliContext;
    }

    public PrintWriter getWriterToAuxiliaryFile() {
        return writerToAuxiliaryFile;
    }

    public UnifiedGenotyper getSingleSampleGenotyper() {
        return ug;
    }

    public List<Set<String>> getReadGroupSets() {
        return readGroupSets;
    }

    public long[] getRunningCoverage() {
        return runningCoverage;
    }

    public SAMFileWriter getSAMFileWriter() {
        return writerToSamFile;
    }

    /** Reads at this locus, or null if no alignment context is set. */
    public List<SAMRecord> getReads() {
        List<SAMRecord> reads;
        if(aliContext == null) {
            reads=null;
        } else {
            reads = aliContext.getReads();
        }
        return reads;
    }

    /** Read offsets at this locus, or null if no alignment context is set. */
    public List<Integer> getOffsets() {
        List<Integer> offsets;
        if(aliContext == null) {
            offsets = null;
        } else {
            offsets = aliContext.getOffsets();
        }
        return offsets;
    }

    /** Reads whose alignment starts exactly at this locus (offset == 0). */
    public List<SAMRecord> getNewReads() {
        List<SAMRecord> newReads;
        if(aliContext == null) {
            newReads = null;
        } else {
            newReads = new LinkedList<SAMRecord>();
            List<SAMRecord> allReads = aliContext.getReads();
            List<Integer> allOffsets = aliContext.getOffsets();
            for(int iter = 0; iter < allReads.size(); iter++) {
                if(allOffsets.get(iter) == 0) {
                    newReads.add(allReads.get(iter));
                }
            }
        }
        return newReads;
    }

    /**
     * Partitions reads (and, optionally, their parallel offsets) by the person
     * whose read-group set contains each read's RG tag.
     *
     * @param unsplitReads   reads to partition (may be null)
     * @param unsplitOffsets parallel offsets, or null to skip offset splitting
     * @return pair of per-person read lists and per-person offset lists (the
     *         latter null when unsplitOffsets is null); both null when the
     *         inputs or read-group sets are missing
     */
    public Pair<List<SAMRecord>[],List<Integer>[]> splitByGroup(List<SAMRecord> unsplitReads, List<Integer> unsplitOffsets) {
        List<SAMRecord>[] readsSplitByGroup;
        List<Integer> [] offsetsSplitByGroup;
        if(unsplitReads != null && readGroupSets != null) {
            readsSplitByGroup = new ArrayList[this.getTotalNumberOfPeople()];
            // BUGFIX: the array slots were never initialized, so every add()
            // call below threw NullPointerException.
            for(int group = 0; group < readsSplitByGroup.length; group++) {
                readsSplitByGroup[group] = new ArrayList<SAMRecord>();
            }
            if(unsplitOffsets != null) {
                offsetsSplitByGroup = new ArrayList[this.getTotalNumberOfPeople()];
                for(int group = 0; group < offsetsSplitByGroup.length; group++) {
                    offsetsSplitByGroup[group] = new ArrayList<Integer>();
                }
            }
            else {
                offsetsSplitByGroup = null;
            }
            int listSize = unsplitReads.size();
            for(int element = 0; element < listSize; element++) {
                SAMRecord read = unsplitReads.get(element);
                for(int groupNumber = 0; groupNumber < this.getTotalNumberOfPeople(); groupNumber++) {
                    if(readGroupSets.get(groupNumber).contains((String) read.getAttribute("RG"))) {
                        readsSplitByGroup[groupNumber].add(read);
                        if(offsetsSplitByGroup != null) {
                            offsetsSplitByGroup[groupNumber].add(unsplitOffsets.get(element));
                        }
                        break; // each read belongs to exactly one person
                    }
                }
            }
        } else {
            readsSplitByGroup = null;
            offsetsSplitByGroup = null; // compiler complains without these lines
        }
        return new Pair(readsSplitByGroup,offsetsSplitByGroup);
    }

    /** Partitions reads by person, discarding offsets. */
    public List<SAMRecord>[] splitReadsByGroup(List<SAMRecord> unsplitReads) {
        return (this.splitByGroup(unsplitReads,null)).first;
    }

    // Static methods follow

    /** Combines map-side locus state with reduce-side writer/genotyper state. */
    public static ArtificialPoolContext mapReduceMerge(ArtificialPoolContext mapContext, ArtificialPoolContext reduceContext) {
        return new ArtificialPoolContext(reduceContext.getWriterToAuxiliaryFile(),reduceContext.getSAMFileWriter(),
            reduceContext.getSingleSampleGenotyper(), reduceContext.getReadGroupSets(), reduceContext.getRunningCoverage(),
            mapContext.getRefMetaDataTracker(),mapContext.getReferenceContext(),mapContext.getAlignmentContext());
    }

    /**
     * Down-samples each person's reads (and parallel offsets) at the rate
     * implied by the global coverage-proportion estimate.
     */
    public static Pair<List<SAMRecord>[],List<Integer>> sampleReadsAndOffsets(List<SAMRecord>[] reads, List<Integer>[] offsets, double[] propEstGlobal) {
        double[] samplingRate = calculateSamplingRateFromGlobalEstimate(propEstGlobal);
        List<SAMRecord>[] sampledReads = new ArrayList[reads.length];
        List<Integer>[] sampledOffsets;
        if(offsets != null){
            sampledOffsets = new ArrayList[offsets.length];
        } else {
            sampledOffsets = null;
        }
        // BUGFIX: initialize the per-group lists; they were previously left
        // null, making the add() calls below throw NullPointerException.
        for(int group = 0; group < reads.length; group++) {
            sampledReads[group] = new ArrayList<SAMRecord>();
            if(sampledOffsets != null) {
                sampledOffsets[group] = new ArrayList<Integer>();
            }
        }
        for(int group = 0; group < reads.length; group++) {
            for(int readNumber = 0; readNumber < reads[group].size(); readNumber++) {
                if(Math.random() < samplingRate[group]) {
                    sampledReads[group].add(reads[group].get(readNumber));
                    if(sampledOffsets != null) {
                        sampledOffsets[group].add(offsets[group].get(readNumber));
                    }
                }
            }
        }
        return new Pair(sampledReads,sampledOffsets);
    }

    /**
     * Person {@code group}'s genotype call and confidence at this locus,
     * separated by {@code spacer}.
     */
    public String genotypeAndConfidenceToString(int group, String spacer) {
        Genotype call = this.getGenotype(group);
        return (call.getBases() + spacer + call.getNegLog10PError()); // TODO: fix me
    }

    /** Runs the genotyper over only person {@code group}'s reads at this locus. */
    public Genotype getGenotype(int group) {
        AlignmentContext alicon = this.getAlignmentContext();
        Pair<List<SAMRecord>[],List<Integer>[]> byGroupSplitPair = this.splitByGroup(alicon.getReads(),alicon.getOffsets());
        Pair<List<Genotype>, GenotypeLocusData> result = ug.map(this.getRefMetaDataTracker(),this.getReferenceContext(),
            new AlignmentContext(this.getAlignmentContext().getLocation(), byGroupSplitPair.first[group],byGroupSplitPair.second[group]));
        return (result.first == null ? null : result.first.get(0));
    }

    /** Down-samples reads only (no offsets). */
    public static List<SAMRecord>[] sampleReads(List<SAMRecord>[] reads, double[] propEstGlobal) {
        return (sampleReadsAndOffsets(reads, null, propEstGlobal)).first;
    }

    /**
     * Converts per-group coverage proportions into per-group sampling rates
     * by normalizing each ratio by the smallest one.
     */
    public static double[] calculateSamplingRateFromGlobalEstimate(double[] ratios) {
        double min = ratios[0];
        for(double ratio : ratios) {
            if(ratio < min) {
                min = ratio;
            }
        }
        double[] samplingRate = new double[ratios.length];
        // now divide by minimum
        for(int j = 0; j < ratios.length; j++) {
            samplingRate[j] = ratios[j]/min;
        }
        return samplingRate;
    }
}

View File

@ -1,43 +0,0 @@
/*
* The Broad Institute
* SOFTWARE COPYRIGHT NOTICE AGREEMENT
* This software and its documentation are copyright 2009 by the
* Broad Institute/Massachusetts Institute of Technology. All rights are reserved.
*
* This software is supplied without any warranty or guaranteed support whatsoever. Neither
* the Broad Institute nor MIT can be responsible for its use, misuse, or functionality.
*/
package org.broadinstitute.sting.playground.utils;
import edu.mit.broad.picard.util.BasicTextFileParser;
import java.io.File;
import java.util.List;
import java.util.Arrays;
/**
 * Parser for whitespace-delimited text files: any whitespace character
 * (space, tab, etc.) is treated as a field delimiter.
 *
 * NOTE(review): the previous javadoc said "tab-delimited", but isDelimiter()
 * accepts every whitespace character, so this parser is whitespace-delimited.
 *
 * @author Kathleen Tibbetts
 */
public class WhitespaceTextFileParser extends BasicTextFileParser {
    /**
     * Constructor
     *
     * @param treatGroupedDelimitersAsOne presumably collapses runs of adjacent
     *        delimiters into a single delimiter -- see BasicTextFileParser
     * @param file The file(s) to parse
     */
    public WhitespaceTextFileParser(boolean treatGroupedDelimitersAsOne, File... file) {
        super(treatGroupedDelimitersAsOne, file);
    }
    /**
     * Determines whether a given character is a delimiter
     *
     * @param b the character to evaluate
     * @return true if <code>b</code> is a delimiter; otherwise false
     */
    protected boolean isDelimiter(byte b) {
        return Character.isWhitespace((char)b);
    }
}

View File

@ -1,13 +0,0 @@
package org.broadinstitute.sting.utils;
/**
 * A single-argument boolean-valued function object, e.g. for filtering.
 *
 * @param <T> the type of the argument tested by this predicate
 */
public interface Predicate<T> {
    /**
     * Evaluates this predicate on the given argument.
     *
     * @param arg the value to test
     * @return true if the argument satisfies the predicate
     */
    public boolean apply(T arg);
}

View File

@ -1,46 +0,0 @@
package org.broadinstitute.sting.utils.containers;
import java.util.PriorityQueue;
/**
 * A bounded collection backed by a min-heap that retains at most
 * {@code maximumSize} of the largest elements (by natural ordering) ever
 * added: when full, a new element is accepted only if it outranks the
 * current minimum, which is then evicted.
 *
 * @param <E> element type; natural ordering defines the score
 */
@Deprecated
public class BoundedScoringSet<E extends Comparable<E> > {
    /** Min-heap whose head is the smallest retained element. */
    private PriorityQueue<E> pq;
    /** Maximum number of elements retained. */
    private int maximumSize;

    public BoundedScoringSet(int maximumSize) {
        pq = new PriorityQueue<E>(maximumSize);
        this.maximumSize = maximumSize;
    }

    /**
     * Adds o when there is room or when it outranks the current minimum,
     * evicting the smallest element(s) to stay within the bound.
     *
     * @return true if o was added
     */
    public boolean add(E o) {
        if (canAdd(o)) {
            pq.add(o);
            while (pq.size() > maximumSize) {
                pq.poll(); // evict the current minimum
            }
            return true;
        }
        return false;
    }

    // BUGFIX: Comparable.compareTo is only guaranteed to return a *positive*
    // value when o is larger -- not exactly 1 (e.g. "c".compareTo("a") == 2),
    // so test "> 0" instead of "== 1".
    private boolean canAdd(E o) { return pq.size() < maximumSize || o.compareTo(pq.peek()) > 0; }

    public void clear() { pq.clear(); }
    public boolean contains(E o) { return pq.contains(o); }
    public boolean offer(E o) { return pq.offer(o); }
    /** @return the smallest retained element, or null when empty */
    public E peek() { return pq.peek(); }
    /** Removes and returns the smallest retained element, or null when empty. */
    public E poll() { return pq.poll(); }
    public boolean remove(E o) { return pq.remove(o); }
    public int size() { return pq.size(); }
    public E[] toArray(E[] os) { return pq.toArray(os); }
}

View File

@ -1,116 +0,0 @@
package org.broadinstitute.sting.utils.io;
import org.broadinstitute.sting.utils.StingException;
import java.io.OutputStream;
import java.io.IOException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileNotFoundException;
/**
* User: hanna
* Date: May 26, 2009
* Time: 3:51:49 PM
* BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
* Software and documentation are copyright 2005 by the Broad Institute.
* All rights are reserved.
*
* Users acknowledge that this software is supplied without any warranty or support.
* The Broad Institute is not responsible for its use, misuse, or
* functionality.
*/
/**
 * An output stream that only initializes itself the first time it is used.
 * Needs a callback (a {@link FileFactory}) that can create the backing file.
 */
@Deprecated
public class LazyFileOutputStream extends OutputStream {
    /**
     * Generates output files on demand.
     */
    private final FileFactory factory;

    /** The file backing the stream; null until the first write forces creation. */
    private File targetFile = null;

    /**
     * The target for any writes performed by the output stream.
     */
    private FileOutputStream targetOutputStream = null;

    /**
     * Create a new LazyFileOutputStream, indicating how to create a new stream.
     * @param factory Creator of the output stream, when necessary.
     */
    public LazyFileOutputStream( FileFactory factory ) {
        this.factory = factory;
    }

    /**
     * Indicates whether the lazily created output stream has actually been
     * materialized yet.
     * @return true if a backing stream exists.
     */
    public boolean isCreated() {
        return targetOutputStream != null;
    }

    /**
     * Public method to return the lazily created file.
     * @return File created by the lazy loader.
     * @throws StingException if no stream was created.
     */
    public File getBackingFile() {
        if( targetFile == null )
            throw new StingException("No lazy-loaded stream was created.");
        return targetFile;
    }

    // NOTE(review): close() and flush() call getBackingOutputStream(), so they
    // CREATE the backing file if nothing was ever written -- confirm callers
    // rely on (or at least tolerate) that side effect.
    @Override
    public void close() throws IOException {
        getBackingOutputStream().close();
    }

    @Override
    public void flush() throws IOException {
        getBackingOutputStream().flush();
    }

    @Override
    public void write(byte[] b) throws IOException {
        getBackingOutputStream().write(b);
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        getBackingOutputStream().write(b,off,len);
    }

    @Override
    public void write(int b) throws IOException {
        getBackingOutputStream().write(b);
    }

    /**
     * Lazy loader for the output stream: on first use, asks the factory for a
     * file and opens a FileOutputStream on it.
     * NOTE(review): not thread-safe -- concurrent first writes could create
     * two files; confirm single-threaded use.
     */
    protected OutputStream getBackingOutputStream() {
        if( targetOutputStream == null ) {
            try {
                targetFile = factory.create();
                targetOutputStream = new FileOutputStream( targetFile );
            }
            catch( IOException ex ) {
                throw new StingException("Unable to open new temp file", ex );
            }
        }
        return targetOutputStream;
    }

    /**
     * Teaches the LazyFileOutputStream how to create a new file when necessary.
     */
    public interface FileFactory {
        public File create() throws IOException;
    }
}

View File

@ -1,79 +0,0 @@
package org.broadinstitute.sting.utils.io;
import java.io.OutputStream;
import java.io.IOException;
/**
* User: hanna
* Date: Apr 30, 2009
* Time: 5:53:32 PM
* BROAD INSTITUTE SOFTWARE COPYRIGHT NOTICE AND AGREEMENT
* Software and documentation are copyright 2005 by the Broad Institute.
* All rights are reserved.
*
* Users acknowledge that this software is supplied without any warranty or support.
* The Broad Institute is not responsible for its use, misuse, or
* functionality.
*/
/**
 * An OutputStream facade whose actual destination is looked up from an
 * {@link OutputStreamProvider} on every operation, so output can be
 * redirected to a different sink transparently to users of this stream.
 */
@Deprecated
public class RedirectingOutputStream extends OutputStream {
    /** Supplies the stream that should currently receive output. */
    private OutputStreamProvider provider;

    /**
     * Build a new redirecting stream.
     * @param provider Function returning the stream output should go to.
     */
    public RedirectingOutputStream( OutputStreamProvider provider ) {
        this.provider = provider;
    }

    /**
     * The stream currently backing this redirector. The result may differ
     * between calls; use sparingly (for testing).
     */
    public OutputStream getBackingOutputStream() {
        return provider.getOutputStream();
    }

    @Override
    public void close() throws IOException {
        provider.getOutputStream().close();
    }

    @Override
    public void flush() throws IOException {
        provider.getOutputStream().flush();
    }

    @Override
    public void write(byte[] b) throws IOException {
        provider.getOutputStream().write(b);
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        provider.getOutputStream().write(b, off, len);
    }

    @Override
    public void write(int b) throws IOException {
        provider.getOutputStream().write(b);
    }

    /**
     * Provides whatever output stream this data should go to at the moment.
     */
    public interface OutputStreamProvider {
        public OutputStream getOutputStream();
    }
}

View File

@ -1,11 +0,0 @@
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
<html>
<head>
</head>
<body bgcolor="white">
Provides general-purpose tools for handling freeform access to files
on disk.
</body>
</html>