Merge branch 'master' of ssh://gsa4.broadinstitute.org/humgen/gsa-scr1/gsa-engineering/git/unstable
commit 5be7e0621d
@@ -25,9 +25,6 @@
 package org.broadinstitute.sting.gatk.walkers.compression.reducereads;
 
-import net.sf.samtools.Cigar;
-import net.sf.samtools.CigarElement;
-import net.sf.samtools.CigarOperator;
 import net.sf.samtools.util.SequenceUtil;
 import org.broadinstitute.sting.commandline.Argument;
 import org.broadinstitute.sting.commandline.Hidden;
@@ -183,7 +180,7 @@ public class ReduceReads extends ReadWalker<LinkedList<GATKSAMRecord>, ReduceRea
      * A value of 0 turns downsampling off.
      */
     @Argument(fullName = "downsample_coverage", shortName = "ds", doc = "", required = false)
-    protected int downsampleCoverage = 0;
+    protected int downsampleCoverage = 250;
 
     @Hidden
     @Argument(fullName = "", shortName = "dl", doc = "", required = false)
@@ -535,81 +532,12 @@ public class ReduceReads extends ReadWalker<LinkedList<GATKSAMRecord>, ReduceRea
         if (debugLevel == 1)
             System.out.println("BAM: " + read.getCigar() + " " + read.getAlignmentStart() + " " + read.getAlignmentEnd());
 
-//        if (!DONT_USE_SOFTCLIPPED_BASES)
-//            reSoftClipBases(read);
-
         if (!DONT_COMPRESS_READ_NAMES)
             compressReadName(read);
 
         out.addAlignment(read);
     }
 
-    private void reSoftClipBases(GATKSAMRecord read) {
-        Integer left = (Integer) read.getTemporaryAttribute("SL");
-        Integer right = (Integer) read.getTemporaryAttribute("SR");
-        if (left != null || right != null) {
-            Cigar newCigar = new Cigar();
-            for (CigarElement element : read.getCigar().getCigarElements()) {
-                newCigar.add(new CigarElement(element.getLength(), element.getOperator()));
-            }
-
-            if (left != null) {
-                newCigar = updateFirstSoftClipCigarElement(left, newCigar);
-                read.setAlignmentStart(read.getAlignmentStart() + left);
-            }
-
-            if (right != null) {
-                Cigar invertedCigar = invertCigar(newCigar);
-                newCigar = invertCigar(updateFirstSoftClipCigarElement(right, invertedCigar));
-            }
-            read.setCigar(newCigar);
-        }
-    }
-
-    /**
-     * Facility routine to revert the first element of a Cigar string (skipping hard clips) into a soft clip.
-     * To be used on both ends if provided a flipped Cigar.
-     *
-     * @param softClipSize  the length of the soft-clipped element to add
-     * @param originalCigar the original Cigar string
-     * @return a new Cigar object with the soft clips added
-     */
-    private Cigar updateFirstSoftClipCigarElement(int softClipSize, Cigar originalCigar) {
-        Cigar result = new Cigar();
-        CigarElement leftElement = new CigarElement(softClipSize, CigarOperator.S);
-        boolean updated = false;
-        for (CigarElement element : originalCigar.getCigarElements()) {
-            if (!updated && element.getOperator() == CigarOperator.M) {
-                result.add(leftElement);
-                int newLength = element.getLength() - softClipSize;
-                if (newLength > 0)
-                    result.add(new CigarElement(newLength, CigarOperator.M));
-                updated = true;
-            }
-            else
-                result.add(element);
-        }
-        return result;
-    }
-
-    /**
-     * Given a cigar string, returns the inverted cigar string.
-     *
-     * @param cigar the original cigar
-     * @return the inverted cigar
-     */
-    private Cigar invertCigar(Cigar cigar) {
-        Stack<CigarElement> stack = new Stack<CigarElement>();
-        for (CigarElement e : cigar.getCigarElements())
-            stack.push(e);
-        Cigar inverted = new Cigar();
-        while (!stack.empty()) {
-            inverted.add(stack.pop());
-        }
-        return inverted;
-    }
-
-
     /**
      * Quality control procedure that checks if the consensus reads contains too many
      * mismatches with the reference. This should never happen and is a good trigger for
@@ -499,7 +499,7 @@ public class SlidingWindow {
         result.addAll(addToSyntheticReads(0, start));
         result.addAll(finalizeAndAdd(ConsensusType.BOTH));
 
-        for (GATKSAMRecord read : result) {
+        for (GATKSAMRecord read : allReads) {
             readsInWindow.remove(read); // todo -- not optimal, but needs to be done so the next region doesn't try to remove the same reads from the header counts.
         }
 
@@ -627,7 +627,7 @@ public class SlidingWindow {
         int locationIndex = startLocation < 0 ? 0 : readStart - startLocation;
 
         if (removeRead && locationIndex < 0)
-            throw new ReviewedStingException("read is behind the Sliding Window. read: " + read + " cigar: " + read.getCigarString() + " window: " + startLocation + "," + stopLocation);
+            throw new ReviewedStingException("read is behind the Sliding Window. read: " + read + " start " + read.getUnclippedStart() + "," + read.getUnclippedEnd() + " cigar: " + read.getCigarString() + " window: " + startLocation + "," + stopLocation);
 
         if (!removeRead) { // we only need to create new header elements if we are adding the read, not when we're removing it
             if (locationIndex < 0) { // Do we need to add extra elements before the start of the header? -- this may happen if the previous read was clipped and this alignment starts before the beginning of the window
@@ -332,11 +332,11 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
         final Map<String, AlignmentContext> splitContexts = AlignmentContextUtils.splitContextBySampleName(context);
         final GenotypesContext genotypes = GenotypesContext.create(splitContexts.keySet().size());
         final MathUtils.RunningAverage averageHQSoftClips = new MathUtils.RunningAverage();
-        for( final String sample : splitContexts.keySet() ) {
+        for( Map.Entry<String, AlignmentContext> sample : splitContexts.entrySet() ) {
             final double[] genotypeLikelihoods = new double[3]; // ref versus non-ref (any event)
             Arrays.fill(genotypeLikelihoods, 0.0);
 
-            for( final PileupElement p : splitContexts.get(sample).getBasePileup() ) {
+            for( final PileupElement p : sample.getValue().getBasePileup() ) {
                 final byte qual = ( USE_EXPANDED_TRIGGER_SET ?
                                     ( p.isNextToSoftClip() || p.isBeforeInsertion() || p.isAfterInsertion() ? ( p.getQual() > QualityUtils.MIN_USABLE_Q_SCORE ? p.getQual() : (byte) 20 ) : p.getQual() )
                                     : p.getQual() );
@@ -362,7 +362,7 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
                     genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD;
                 }
             }
-            genotypes.add( new GenotypeBuilder(sample).alleles(noCall).PL(genotypeLikelihoods).make() );
+            genotypes.add( new GenotypeBuilder(sample.getKey()).alleles(noCall).PL(genotypeLikelihoods).make() );
         }
 
         final ArrayList<Allele> alleles = new ArrayList<Allele>();
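Note: the keySet()-then-get() loops replaced throughout this commit perform two hash lookups per element, while iterating entrySet() performs one and reads the value straight off the entry. A minimal sketch of the pattern (hypothetical map, not GATK code):

    import java.util.HashMap;
    import java.util.Map;

    public class EntrySetSketch {
        public static void main(String[] args) {
            Map<String, Integer> counts = new HashMap<String, Integer>();
            counts.put("sampleA", 3);
            counts.put("sampleB", 7);

            // Before: the iteration hashes each key once, and get() hashes it again
            for (String key : counts.keySet())
                System.out.println(key + " -> " + counts.get(key));

            // After: key and value come from the same entry, no second lookup
            for (Map.Entry<String, Integer> entry : counts.entrySet())
                System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }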
@@ -53,8 +53,8 @@ public class LikelihoodCalculationEngine {
     public void computeReadLikelihoods( final ArrayList<Haplotype> haplotypes, final HashMap<String, ArrayList<GATKSAMRecord>> perSampleReadList ) {
 
         int X_METRIC_LENGTH = 0;
-        for( final String sample : perSampleReadList.keySet() ) {
-            for( final GATKSAMRecord read : perSampleReadList.get(sample) ) {
+        for( final Map.Entry<String, ArrayList<GATKSAMRecord>> sample : perSampleReadList.entrySet() ) {
+            for( final GATKSAMRecord read : sample.getValue() ) {
                 final int readLength = read.getReadLength();
                 if( readLength > X_METRIC_LENGTH ) { X_METRIC_LENGTH = readLength; }
             }
@@ -326,9 +326,9 @@ public class LikelihoodCalculationEngine {
     public static Map<String, Map<Allele, List<GATKSAMRecord>>> partitionReadsBasedOnLikelihoods( final GenomeLocParser parser, final HashMap<String, ArrayList<GATKSAMRecord>> perSampleReadList, final HashMap<String, ArrayList<GATKSAMRecord>> perSampleFilteredReadList, final Pair<VariantContext, HashMap<Allele,ArrayList<Haplotype>>> call) {
         final Map<String, Map<Allele, List<GATKSAMRecord>>> returnMap = new HashMap<String, Map<Allele, List<GATKSAMRecord>>>();
         final GenomeLoc callLoc = parser.createGenomeLoc(call.getFirst());
-        for( final String sample : perSampleReadList.keySet() ) {
+        for( final Map.Entry<String, ArrayList<GATKSAMRecord>> sample : perSampleReadList.entrySet() ) {
             final Map<Allele, List<GATKSAMRecord>> alleleReadMap = new HashMap<Allele, List<GATKSAMRecord>>();
-            final ArrayList<GATKSAMRecord> readsForThisSample = perSampleReadList.get(sample);
+            final ArrayList<GATKSAMRecord> readsForThisSample = sample.getValue();
             for( int iii = 0; iii < readsForThisSample.size(); iii++ ) {
                 final GATKSAMRecord read = readsForThisSample.get(iii); // BUGBUG: assumes read order in this list and haplotype likelihood list are the same!
                 // only count the read if it overlaps the event, otherwise it is not added to the output read list at all
@@ -338,7 +338,7 @@ public class LikelihoodCalculationEngine {
                 for( final Allele a : call.getFirst().getAlleles() ) { // find the allele with the highest haplotype likelihood
                     double maxLikelihood = Double.NEGATIVE_INFINITY;
                     for( final Haplotype h : call.getSecond().get(a) ) { // use the max likelihood from all the haplotypes which mapped to this allele (achieved via the haplotype mapper object)
-                        final double likelihood = h.getReadLikelihoods(sample)[iii];
+                        final double likelihood = h.getReadLikelihoods(sample.getKey())[iii];
                         if( likelihood > maxLikelihood ) {
                             maxLikelihood = likelihood;
                         }
@@ -373,13 +373,13 @@ public class LikelihoodCalculationEngine {
                 readList = new ArrayList<GATKSAMRecord>();
                 alleleReadMap.put(Allele.NO_CALL, readList);
             }
-            for( final GATKSAMRecord read : perSampleFilteredReadList.get(sample) ) {
+            for( final GATKSAMRecord read : perSampleFilteredReadList.get(sample.getKey()) ) {
                 // only count the read if it overlaps the event, otherwise it is not added to the output read list at all
                 if( callLoc.overlapsP(parser.createGenomeLoc(read)) ) {
                     readList.add(read);
                 }
             }
-            returnMap.put(sample, alleleReadMap);
+            returnMap.put(sample.getKey(), alleleReadMap);
         }
         return returnMap;
     }
@@ -21,28 +21,28 @@ public class ReduceReadsIntegrationTest extends WalkerTest {
 
     @Test(enabled = true)
     public void testDefaultCompression() {
-        RRTest("testDefaultCompression ", L, "323dd4deabd7767efa0f2c6e7fa4189f");
+        RRTest("testDefaultCompression ", L, "72eb6db9d7a09a0cc25eaac1aafa97b7");
     }
 
     @Test(enabled = true)
     public void testMultipleIntervals() {
         String intervals = "-L 20:10,100,000-10,100,500 -L 20:10,200,000-10,200,500 -L 20:10,300,000-10,300,500 -L 20:10,400,000-10,500,000 -L 20:10,500,050-10,500,060 -L 20:10,600,000-10,600,015 -L 20:10,700,000-10,700,110";
-        RRTest("testMultipleIntervals ", intervals, "c437fb160547ff271f8eba30e5f3ff76");
+        RRTest("testMultipleIntervals ", intervals, "104b1a1d9fa5394c6fea95cd32967b78");
     }
 
     @Test(enabled = true)
     public void testHighCompression() {
-        RRTest("testHighCompression ", " -cs 10 -minvar 0.3 -mindel 0.3 " + L, "3a607bc3ebaf84e9dc44e005c5f8a047");
+        RRTest("testHighCompression ", " -cs 10 -minvar 0.3 -mindel 0.3 " + L, "c55140cec60fa8c35161680289d74d47");
     }
 
     @Test(enabled = true)
     public void testLowCompression() {
-        RRTest("testLowCompression ", " -cs 30 -minvar 0.01 -mindel 0.01 -minmap 5 -minqual 5 " + L, "afd39459c841b68a442abdd5ef5f8f27");
+        RRTest("testLowCompression ", " -cs 30 -minvar 0.01 -mindel 0.01 -minmap 5 -minqual 5 " + L, "0f2e57b7f6de03cc4da1ffcc8cf8f1a7");
     }
 
     @Test(enabled = true)
     public void testIndelCompression() {
-        RRTest("testIndelCompression ", " -cs 50 -L 20:10,100,500-10,100,600 ", "f7b9fa44c10bc4b2247813d2b8dc1973");
+        RRTest("testIndelCompression ", " -cs 50 -L 20:10,100,500-10,100,600 ", "dda0c95f56f90e5f633c2437c2b21031");
     }
 
     @Test(enabled = true)
@@ -309,7 +309,6 @@ public class VariantContextAdaptors {
         int index = hapmap.getStart() - ref.getWindow().getStart();
         if ( index < 0 )
             return null; // we weren't given enough reference context to create the VariantContext
-        Byte refBaseForIndel = new Byte(ref.getBases()[index]);
 
         HashSet<Allele> alleles = new HashSet<Allele>();
         Allele refSNPAllele = Allele.create(ref.getBase(), true);
@@ -574,7 +574,7 @@ public class ClipReads extends ReadWalker<ClipReads.ReadClipperWithData, ClipRea
         }
     }
 
-    public class ReadClipperWithData extends ReadClipper {
+    public static class ReadClipperWithData extends ReadClipper {
         private ClippingData data;
 
         public ReadClipperWithData(GATKSAMRecord read, List<SeqToClip> clipSeqs) {
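Note: several nested classes in this commit (ReadClipperWithData, HaplotypeComparator, SAMRecordHashObject, RandomVariantStructure, CigarShift, EventPair, BAQedPileupElement) are made static because they never use the enclosing instance; a non-static inner class silently pins its outer object in memory. A minimal sketch with hypothetical names:

    public class Outer {
        // Inner class: every instance carries a hidden Outer.this reference
        class Inner { int x; }

        // Static nested class: no hidden reference, no enclosing instance required
        static class Nested { int x; }

        public static void main(String[] args) {
            Outer.Nested n = new Outer.Nested();     // fine on its own
            Outer.Inner i = new Outer().new Inner(); // needs an Outer instance
            System.out.println(n.x + i.x);
        }
    }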
@@ -65,12 +65,12 @@ public class BaseQualityRankSumTest extends RankSumTest implements StandardAnnot
             // by design, first element in LinkedHashMap was ref allele
             double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
 
-            for (Allele a : el.keySet()) {
+            for (Map.Entry<Allele, Double> entry : el.entrySet()) {
 
-                if (a.isReference())
-                    refLikelihood =el.get(a);
+                if (entry.getKey().isReference())
+                    refLikelihood = entry.getValue();
                 else {
-                    double like = el.get(a);
+                    double like = entry.getValue();
                     if (like >= altLikelihood)
                         altLikelihood = like;
                 }
@@ -291,8 +291,8 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
 
         int[][] table = new int[2][2];
 
-        for ( String sample : stratifiedContexts.keySet() ) {
-            final AlignmentContext context = stratifiedContexts.get(sample);
+        for ( Map.Entry<String, AlignmentContext> sample : stratifiedContexts.entrySet() ) {
+            final AlignmentContext context = sample.getValue();
             if ( context == null )
                 continue;
 
@@ -313,12 +313,12 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
 
             double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
 
-            for (Allele a : el.keySet()) {
+            for (Map.Entry<Allele,Double> entry : el.entrySet()) {
 
-                if (a.isReference())
-                    refLikelihood =el.get(a);
+                if (entry.getKey().isReference())
+                    refLikelihood = entry.getValue();
                 else {
-                    double like = el.get(a);
+                    double like = entry.getValue();
                     if (like >= altLikelihood)
                         altLikelihood = like;
                 }
@@ -103,7 +103,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
         return map;
     }
 
-    private class HaplotypeComparator implements Comparator<Haplotype> {
+    private static class HaplotypeComparator implements Comparator<Haplotype> {
 
         public int compare(Haplotype a, Haplotype b) {
             if (a.getQualitySum() < b.getQualitySum())
@@ -362,8 +362,8 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
         // Score all the reads in the pileup, even the filtered ones
         final double[] scores = new double[el.size()];
         int i = 0;
-        for (Allele a : el.keySet()) {
-            scores[i++] = -el.get(a);
+        for (Map.Entry<Allele, Double> a : el.entrySet()) {
+            scores[i++] = -a.getValue();
             if (DEBUG) {
                 System.out.printf(" vs. haplotype %d = %f%n", i - 1, scores[i - 1]);
             }
@@ -61,12 +61,12 @@ public class MappingQualityRankSumTest extends RankSumTest implements StandardAn
             // by design, first element in LinkedHashMap was ref allele
             double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
 
-            for (Allele a : el.keySet()) {
+            for (Map.Entry<Allele,Double> a : el.entrySet()) {
 
-                if (a.isReference())
-                    refLikelihood =el.get(a);
+                if (a.getKey().isReference())
+                    refLikelihood = a.getValue();
                 else {
-                    double like = el.get(a);
+                    double like = a.getValue();
                     if (like >= altLikelihood)
                         altLikelihood = like;
                 }
@@ -87,11 +87,11 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio
             LinkedHashMap<Allele, Double> el = indelLikelihoodMap.get(p); // retrieve likelihood information corresponding to this read
             double refLikelihood = 0.0, altLikelihood = Double.NEGATIVE_INFINITY; // by design, first element in LinkedHashMap was ref allele
 
-            for (Allele a : el.keySet()) {
-                if (a.isReference())
-                    refLikelihood = el.get(a);
+            for (Map.Entry<Allele,Double> a : el.entrySet()) {
+                if (a.getKey().isReference())
+                    refLikelihood = a.getValue();
                 else {
-                    double like = el.get(a);
+                    double like = a.getValue();
                     if (like >= altLikelihood)
                         altLikelihood = like;
                 }
@@ -100,7 +100,6 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio
             int readPos = getOffsetFromClippedReadStart(p.getRead(), p.getOffset());
             final int numAlignedBases = getNumAlignedBases(p.getRead());
 
-            int rp = readPos;
             if (readPos > numAlignedBases / 2) {
                 readPos = numAlignedBases - (readPos + 1);
             }
@@ -24,6 +24,7 @@
 
 package org.broadinstitute.sting.gatk.walkers.diagnostics.targets;
 
+import org.broadinstitute.sting.commandline.Argument;
 import org.broadinstitute.sting.commandline.Output;
 import org.broadinstitute.sting.gatk.CommandLineGATK;
 import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
@@ -46,6 +47,9 @@ public class FindCoveredIntervals extends ActiveRegionWalker<GenomeLoc, Long> {
     @Output(required = true)
     private PrintStream out;
 
+    @Argument(fullName = "coverage_threshold", shortName = "cov", doc = "The minimum allowable coverage to be considered covered", required = false)
+    private int coverageThreshold = 20;
+
     @Override
     // Look to see if the region has sufficient coverage
     public ActivityProfileResult isActive(final RefMetaDataTracker tracker, final ReferenceContext ref, final AlignmentContext context) {
@@ -53,8 +57,7 @@ public class FindCoveredIntervals extends ActiveRegionWalker<GenomeLoc, Long> {
         int depth = ThresHolder.DEFAULTS.getFilteredCoverage(context.getBasePileup());
 
         // note the linear probability scale
-        int coverageThreshold = 20;
-        return new ActivityProfileResult(Math.min((double) depth / coverageThreshold, 1));
+        return new ActivityProfileResult(Math.min(depth / coverageThreshold, 1));
 
     }
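Note: with coverageThreshold now an int field, dropping the (double) cast makes depth / coverageThreshold integer division, so the returned activity value can only be 0 or 1 rather than the linear scale the comment above promises; whether that is intended is worth checking. A minimal illustration:

    public class DivisionSketch {
        public static void main(String[] args) {
            int depth = 10, coverageThreshold = 20;
            // Integer division truncates: 10 / 20 == 0
            System.out.println(Math.min(depth / coverageThreshold, 1));             // prints 0
            // Casting first keeps the linear probability scale
            System.out.println(Math.min((double) depth / coverageThreshold, 1.0));  // prints 0.5
        }
    }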
@@ -148,8 +148,8 @@ public class ConsensusAlleleCounter {
                 boolean foundKey = false;
                 // copy of hashmap into temp arrayList
                 ArrayList<Pair<String,Integer>> cList = new ArrayList<Pair<String,Integer>>();
-                for (String s : consensusIndelStrings.keySet()) {
-                    cList.add(new Pair<String, Integer>(s,consensusIndelStrings.get(s)));
+                for (Map.Entry<String, Integer> s : consensusIndelStrings.entrySet()) {
+                    cList.add(new Pair<String, Integer>(s.getKey(), s.getValue()));
                 }
 
                 if (read.getAlignmentEnd() == loc.getStart()) {
@@ -208,7 +208,7 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC
         return new ReadBackedPileupImpl( pileup.getLocation(), BAQedElements );
     }
 
-    public class BAQedPileupElement extends PileupElement {
+    public static class BAQedPileupElement extends PileupElement {
         public BAQedPileupElement( final PileupElement PE ) {
             super(PE.getRead(), PE.getOffset(), PE.isDeletion(), PE.isBeforeDeletedBase(), PE.isAfterDeletedBase(), PE.isBeforeInsertion(), PE.isAfterInsertion(), PE.isNextToSoftClip());
         }
@@ -263,7 +263,6 @@ public class UnifiedArgumentCollection {
         uac.referenceSampleName = referenceSampleName;
         uac.samplePloidy = samplePloidy;
-        uac.maxQualityScore = minQualityScore;
         uac.maxQualityScore = maxQualityScore;
         uac.phredScaledPrior = phredScaledPrior;
         uac.minPower = minPower;
         uac.minReferenceDepth = minReferenceDepth;
@@ -124,7 +124,7 @@ public class ConstrainedMateFixingManager {
         return first;
     }
 
-    private class SAMRecordHashObject {
+    private static class SAMRecordHashObject {
         public SAMRecord record;
         public boolean wasModified;
 
@@ -35,6 +35,7 @@ import org.broadinstitute.sting.utils.variantcontext.Allele;
 
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.Map;
 
 public class HaplotypeIndelErrorModel {
@@ -427,8 +428,8 @@ public class HaplotypeIndelErrorModel {
                 // for each read/haplotype combination, compute likelihoods, ie -10*log10(Pr(R | Hi))
                 // = sum_j(-10*log10(Pr(R_j | Hi) since reads are assumed to be independent
                 int j=0;
-                for (Allele a: haplotypesInVC.keySet()) {
-                    readLikelihoods[i][j]= computeReadLikelihoodGivenHaplotype(haplotypesInVC.get(a), read);
+                for (Map.Entry<Allele,Haplotype> a: haplotypesInVC.entrySet()) {
+                    readLikelihoods[i][j]= computeReadLikelihoodGivenHaplotype(a.getValue(), read);
                     if (DEBUG) {
                         System.out.print(read.getReadName()+" ");
 
@@ -332,7 +332,7 @@ public class RealignerTargetCreator extends RodWalker<RealignerTargetCreator.Eve
 
     private enum EVENT_TYPE { POINT_EVENT, INDEL_EVENT, BOTH }
 
-    class EventPair {
+    static class EventPair {
         public Event left, right;
         public TreeSet<GenomeLoc> intervals = new TreeSet<GenomeLoc>();
 
@@ -1304,7 +1304,7 @@ public class SomaticIndelDetector extends ReadWalker<Integer,Integer> {
 
     @Override
     public Integer reduceInit() {
-        return new Integer(0);
+        return 0;
     }
 
 
@@ -426,10 +426,10 @@ public class PhaseByTransmission extends RodWalker<HashMap<Byte,Integer>, HashMa
         Map<String,Set<Sample>> families = this.getSampleDB().getFamilies();
         Set<Sample> family;
         ArrayList<Sample> parents;
-        for(String familyID : families.keySet()){
-            family = families.get(familyID);
+        for(Map.Entry<String,Set<Sample>> familyEntry : families.entrySet()){
+            family = familyEntry.getValue();
             if(family.size()<2 || family.size()>3){
-                logger.info(String.format("Caution: Family %s has %d members; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyID,family.size()));
+                logger.info(String.format("Caution: Family %s has %d members; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyEntry.getKey(),family.size()));
             }
             else{
                 for(Sample familyMember : family){
@@ -438,7 +438,7 @@ public class PhaseByTransmission extends RodWalker<HashMap<Byte,Integer>, HashMa
                     if(family.containsAll(parents))
                         this.trios.add(familyMember);
                     else
-                        logger.info(String.format("Caution: Family %s skipped as it is not a trio nor a parent/child pair; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyID));
+                        logger.info(String.format("Caution: Family %s skipped as it is not a trio nor a parent/child pair; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyEntry.getKey()));
                     break;
                 }
             }
@@ -183,13 +183,13 @@ public class VariantEvalReportWriter {
             throw new ReviewedStingException("Datamap is empty for analysis " + scanner.getAnalysis());
 
         // add DataPoint's for each field marked as such
-        for (final Field field : datamap.keySet()) {
+        for (final Map.Entry<Field, DataPoint> field : datamap.entrySet()) {
             try {
-                field.setAccessible(true);
+                field.getKey().setAccessible(true);
 
                 // this is an atomic value, add a column for it
-                final String format = datamap.get(field).format();
-                table.addColumn(field.getName(), format);
+                final String format = field.getValue().format();
+                table.addColumn(field.getKey().getName(), format);
             } catch (SecurityException e) {
                 throw new StingException("SecurityException: " + e);
             }
@@ -34,15 +34,13 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
 import org.broadinstitute.sting.gatk.walkers.*;
 import org.broadinstitute.sting.utils.SampleUtils;
 import org.broadinstitute.sting.utils.codecs.vcf.VCFHeader;
+import org.broadinstitute.sting.utils.codecs.vcf.VCFHeaderLine;
 import org.broadinstitute.sting.utils.codecs.vcf.VCFUtils;
 import org.broadinstitute.sting.utils.help.DocumentedGATKFeature;
 import org.broadinstitute.sting.utils.variantcontext.writer.VariantContextWriter;
 import org.broadinstitute.sting.utils.variantcontext.VariantContext;
 
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 
 /**
  * Filters a lifted-over VCF file for ref bases that have been changed.
@@ -66,7 +64,7 @@ public class FilterLiftedVariants extends RodWalker<Integer, Integer> {
         Set<String> samples = SampleUtils.getSampleListWithVCFHeader(getToolkit(), Arrays.asList(trackName));
         Map<String, VCFHeader> vcfHeaders = VCFUtils.getVCFHeadersFromRods(getToolkit(), Arrays.asList(trackName));
 
-        final VCFHeader vcfHeader = new VCFHeader(vcfHeaders.containsKey(trackName) ? vcfHeaders.get(trackName).getMetaDataInSortedOrder() : null, samples);
+        final VCFHeader vcfHeader = new VCFHeader(vcfHeaders.containsKey(trackName) ? vcfHeaders.get(trackName).getMetaDataInSortedOrder() : Collections.<VCFHeaderLine>emptySet(), samples);
         writer.writeHeader(vcfHeader);
     }
 
@@ -329,7 +329,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> implements TreeR
 
 
     /* Private class used to store the intermediate variants in the integer random selection process */
-    private class RandomVariantStructure {
+    private static class RandomVariantStructure {
        private VariantContext vc;
 
        RandomVariantStructure(VariantContext vcP) {
@@ -329,7 +329,7 @@ public class SequenceDictionaryUtils {
      */
     private static class CompareSequenceRecordsByIndex implements Comparator<SAMSequenceRecord> {
         public int compare(SAMSequenceRecord x, SAMSequenceRecord y) {
-            return new Integer(x.getSequenceIndex()).compareTo(y.getSequenceIndex());
+            return Integer.valueOf(x.getSequenceIndex()).compareTo(y.getSequenceIndex());
         }
     }
 
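Note: new Integer(...) always allocates a fresh object, while Integer.valueOf(...) may return a cached box (guaranteed for values in -128..127); this commit makes the same swap in QualQuantizer and SomaticIndelDetector below. A minimal sketch (Integer.compare is the boxing-free alternative, though it requires Java 7, which this codebase may not target):

    public class BoxingSketch {
        public static void main(String[] args) {
            int x = 5, y = 9;
            // Allocates a new Integer on every call
            System.out.println(new Integer(x).compareTo(y));
            // May reuse a cached Integer instead of allocating
            System.out.println(Integer.valueOf(x).compareTo(y));
            // Avoids boxing entirely (Java 7+)
            System.out.println(Integer.compare(x, y));
        }
    }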
@@ -538,7 +538,7 @@ public class ClippingOp {
         return 0;
     }
 
-    private class CigarShift {
+    private static class CigarShift {
         private Cigar cigar;
         private int shiftFromStart;
         private int shiftFromEnd;
@@ -492,6 +492,6 @@ public final class BCF2Codec implements FeatureCodec<VariantContext> {
     }
 
     private final void error(final String message) throws RuntimeException {
-        throw new UserException.MalformedBCF2(String.format("At record %d with position %d:", recordNo, pos, message));
+        throw new UserException.MalformedBCF2(String.format("%s, at record %d with position %d:", message, recordNo, pos));
     }
 }
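Note: the old format string has only two %d specifiers, so the third argument (message) was silently dropped — String.format ignores surplus arguments rather than raising an error. The fix adds a %s and reorders the arguments. A minimal illustration:

    public class FormatSketch {
        public static void main(String[] args) {
            // Extra arguments beyond the specifiers are ignored, not an error
            System.out.println(String.format("At record %d with position %d:", 7, 42, "lost message"));
            // Fixed: the message is actually rendered
            System.out.println(String.format("%s, at record %d with position %d:", "kept message", 7, 42));
        }
    }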
@@ -681,8 +681,8 @@ public class IntervalUtils {
 
         LinkedHashMap<String, List<GenomeLoc>> locsByContig = splitByContig(sorted);
         List<GenomeLoc> expanded = new ArrayList<GenomeLoc>();
-        for (String contig: locsByContig.keySet()) {
-            List<GenomeLoc> contigLocs = locsByContig.get(contig);
+        for (Map.Entry<String, List<GenomeLoc>> contig: locsByContig.entrySet()) {
+            List<GenomeLoc> contigLocs = contig.getValue();
             int contigLocsSize = contigLocs.size();
 
             GenomeLoc startLoc, stopLoc;
@@ -223,7 +223,7 @@ public class QualQuantizer {
 
         @Override
         public int compareTo(final QualInterval qualInterval) {
-            return new Integer(this.qStart).compareTo(qualInterval.qStart);
+            return Integer.valueOf(this.qStart).compareTo(qualInterval.qStart);
         }
 
         /**
@@ -401,10 +401,11 @@ public class RecalDatumNode<T extends RecalDatum> {
 
             if ( minPenaltyNode == null || minPenaltyNode.getSecond() > maxPenalty ) {
                 // nothing to merge, or the best candidate is above our max allowed
-                if ( minPenaltyNode == null )
+                if ( minPenaltyNode == null ) {
                     if ( logger.isDebugEnabled() ) logger.debug("Stopping because no candidates could be found");
-                else
+                } else {
                     if ( logger.isDebugEnabled() ) logger.debug("Stopping because node " + minPenaltyNode.getFirst() + " has penalty " + minPenaltyNode.getSecond() + " > max " + maxPenalty);
+                }
                 break;
            } else {
                // remove the lowest penalty element, and continue
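Note: the braces added in this last hunk fix a dangling-else hazard. In the old code the else bound to the nearest if — the logger.isDebugEnabled() check — not to the minPenaltyNode == null test, so the second debug message could never fire. A minimal sketch of the hazard:

    public class DanglingElseSketch {
        public static void main(String[] args) {
            boolean outer = false, inner = false;
            // Despite the indentation, this else binds to the INNER if:
            if (outer)
                if (inner) System.out.println("A");
                else System.out.println("B"); // unreachable when outer == false
            // Braces make the intended pairing explicit:
            if (outer) {
                if (inner) System.out.println("A");
            } else {
                System.out.println("B"); // runs when outer == false
            }
        }
    }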