Various code improvements based on FindBugs

git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@3755 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
ebanks 2010-07-09 15:04:48 +00:00
parent abaec13e38
commit 405be230d0
19 changed files with 68 additions and 64 deletions

View File

@ -37,6 +37,11 @@ import net.sf.samtools.SAMRecord;
/**
 * Filters out reads whose mate maps to a different contig than the read itself.
 *
 * A read has a "bad" mate when it is paired, the mate is mapped, and the mate's
 * reference index differs from the read's own reference index.
 */
public class BadMateFilter implements SamRecordFilter {

    /**
     * @param rec the read to test; must not be null
     * @return true if the read should be filtered out because its mate is bad
     */
    public boolean filterOut(final SAMRecord rec) {
        // delegate so other walkers can share the same mate check without
        // instantiating a filter
        return hasBadMate(rec);
    }

    /**
     * Static form of the mate check, usable outside the SamRecordFilter framework.
     *
     * @param rec the read to test; must not be null
     * @return true if rec is paired, its mate is mapped, and the mate maps to a
     *         different reference contig than rec
     */
    public static boolean hasBadMate(final SAMRecord rec) {
        // getReferenceIndex()/getMateReferenceIndex() return boxed Integers, so
        // they must be compared with equals(), not ==/!= (reference comparison
        // of boxed values is the FindBugs RC_REF_COMPARISON defect)
        return (rec.getReadPairedFlag() && !rec.getMateUnmappedFlag() && !rec.getReferenceIndex().equals(rec.getMateReferenceIndex()));
    }
}

View File

@ -22,8 +22,8 @@ public class DepthOfCoverage implements InfoFieldAnnotation, StandardAnnotation
return null;
int depth = 0;
for ( String sample : stratifiedContexts.keySet() )
depth += stratifiedContexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).size();
for ( Map.Entry<String, StratifiedAlignmentContext> sample : stratifiedContexts.entrySet() )
depth += sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).size();
Map<String, Object> map = new HashMap<String, Object>();
map.put(getKeyNames().get(0), String.format("%d", depth));
return map;

View File

@ -237,7 +237,7 @@ public class HaplotypeScore implements InfoFieldAnnotation, StandardAnnotation {
public String toString() { return new String(this.bases); }
}
private class BaseQual extends Pair<Byte, Byte> {
private static class BaseQual extends Pair<Byte, Byte> {
public BaseQual(byte base, byte qual) {
super(base, qual);
}

View File

@ -25,9 +25,9 @@ public class LowMQ implements InfoFieldAnnotation {
double mq0 = 0;
double mq10 = 0;
double total = 0;
for ( String sample : stratifiedContexts.keySet() )
for ( Map.Entry<String, StratifiedAlignmentContext> sample : stratifiedContexts.entrySet() )
{
ReadBackedPileup pileup = stratifiedContexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
ReadBackedPileup pileup = sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for (PileupElement p : pileup )
{
if ( p.getMappingQual() == 0 ) { mq0 += 1; }

View File

@ -24,8 +24,8 @@ public class MappingQualityZero implements InfoFieldAnnotation, StandardAnnotati
return null;
int mq0 = 0;
for ( String sample : stratifiedContexts.keySet() ) {
ReadBackedPileup pileup = stratifiedContexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for ( Map.Entry<String, StratifiedAlignmentContext> sample : stratifiedContexts.entrySet() ) {
ReadBackedPileup pileup = sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for (PileupElement p : pileup ) {
if ( p.getMappingQual() == 0 )
mq0++;

View File

@ -23,8 +23,8 @@ public class RMSMappingQuality implements InfoFieldAnnotation, StandardAnnotatio
return null;
ArrayList<Integer> qualities = new ArrayList<Integer>();
for ( String sample : stratifiedContexts.keySet() ) {
ReadBackedPileup pileup = stratifiedContexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for ( Map.Entry<String, StratifiedAlignmentContext> sample : stratifiedContexts.entrySet() ) {
ReadBackedPileup pileup = sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for (PileupElement p : pileup )
qualities.add(p.getRead().getMappingQuality());
}

View File

@ -24,8 +24,8 @@ public class SpanningDeletions implements InfoFieldAnnotation, StandardAnnotatio
int deletions = 0;
int depth = 0;
for ( String sample : stratifiedContexts.keySet() ) {
ReadBackedPileup pileup = stratifiedContexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for ( Map.Entry<String, StratifiedAlignmentContext> sample : stratifiedContexts.entrySet() ) {
ReadBackedPileup pileup = sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
deletions += pileup.getNumberOfDeletions();
depth += pileup.size();
}

View File

@ -69,15 +69,14 @@ public class DiploidGenotypeCalculationModel extends JointEstimateGenotypeCalcul
// use flat priors for GLs
DiploidGenotypePriors priors = new DiploidGenotypePriors();
for ( String sample : contexts.keySet() ) {
StratifiedAlignmentContext context = contexts.get(sample);
ReadBackedPileup pileup = context.getContext(contextType).getBasePileup();
for ( Map.Entry<String, StratifiedAlignmentContext> sample : contexts.entrySet() ) {
ReadBackedPileup pileup = sample.getValue().getContext(contextType).getBasePileup();
// create the GenotypeLikelihoods object
GenotypeLikelihoods GL = new GenotypeLikelihoods(UAC.baseModel, priors, UAC.defaultPlatform);
GL.add(pileup, true, UAC.CAP_BASE_QUALITY);
GLs.put(sample, GL);
GLs.put(sample.getKey(), GL);
double[] posteriors = GL.getPosteriors();
@ -87,7 +86,7 @@ public class DiploidGenotypeCalculationModel extends JointEstimateGenotypeCalcul
if ( alt != ref ) {
DiploidGenotype hetGenotype = DiploidGenotype.createDiploidGenotype(ref, alt);
DiploidGenotype homGenotype = DiploidGenotype.createHomGenotype(alt);
AFMatrixMap.get(alt).setLikelihoods(posteriors[refGenotype.ordinal()], posteriors[hetGenotype.ordinal()], posteriors[homGenotype.ordinal()], sample);
AFMatrixMap.get(alt).setLikelihoods(posteriors[refGenotype.ordinal()], posteriors[hetGenotype.ordinal()], posteriors[homGenotype.ordinal()], sample.getKey());
}
}
}
@ -171,7 +170,7 @@ public class DiploidGenotypeCalculationModel extends JointEstimateGenotypeCalcul
}
protected class AlleleFrequencyMatrix {
protected static class AlleleFrequencyMatrix {
private double[][] matrix; // allele frequency matrix
private int[] indexes; // matrix to maintain which genotype is active

View File

@ -57,7 +57,7 @@ public class DiploidGenotypePriors {
private double[] priors = null;
// todo -- fix me when this issue is resolved
public static boolean RequirePriorSumToOne = false;
public static final boolean requirePriorSumToOne = false;
/**
* Create a new DiploidGenotypePriors object with flat priors for each diploid genotype
@ -124,7 +124,7 @@ public class DiploidGenotypePriors {
public boolean validate(boolean throwException) {
try {
if ( RequirePriorSumToOne && MathUtils.compareDoubles(MathUtils.sumLog10(priors), 1.0) != 0 ) {
if ( requirePriorSumToOne && MathUtils.compareDoubles(MathUtils.sumLog10(priors), 1.0) != 0 ) {
throw new IllegalStateException(String.format("Priors don't sum to 1: sum=%f %s", MathUtils.sumLog10(priors), Arrays.toString(priors)));
}

View File

@ -101,7 +101,7 @@ public class EmpiricalSubstitutionProbabilities extends FourBaseProbabilities {
double logP = log10pTrueGivenMiscall.get(pl)[i][j];
if ( logP == 0.0 )
throw new RuntimeException(String.format("Bad miscall base request miscalled=%c true=%b", miscalledBase, trueBase));
throw new RuntimeException(String.format("Bad miscall base request miscalled=%c true=%c", miscalledBase, trueBase));
else
return logP;
}

View File

@ -6,11 +6,7 @@ public class FourBaseProbabilitiesFactory {
//private FourBaseProbabilitiesFactory() {} // cannot be instantiated
public static BaseMismatchModel getBaseMismatchModel(final String name) {
BaseMismatchModel m = valueOf(name);
if ( m == null )
throw new RuntimeException("Unexpected BaseMismatchModel " + name);
else
return m;
return valueOf(name);
}
public static BaseMismatchModel getBaseMismatchModel(final FourBaseProbabilities m) {

View File

@ -37,11 +37,7 @@ public class GenotypeCalculationModelFactory {
//private GenotypeCalculationModelFactory() {} // cannot be instantiated
public static GenotypeCalculationModel.Model getGenotypeCalculationModel(final String name) {
GenotypeCalculationModel.Model m = valueOf(name);
if ( m == null )
throw new RuntimeException("Unexpected GenotypeCalculationModel " + name);
else
return m;
return valueOf(name);
}
/**

View File

@ -499,7 +499,7 @@ public class GenotypeLikelihoods implements Cloneable {
//
// Constant static data
//
protected final static double[] zeros = new double[DiploidGenotype.values().length];
final static double[] zeros = new double[DiploidGenotype.values().length];
static {
for ( DiploidGenotype g : DiploidGenotype.values() ) {

View File

@ -13,8 +13,8 @@ public abstract class JointEstimateGenotypeCalculationModel extends GenotypeCalc
// for use in optimizing the P(D|AF) calculations:
// how much off from the max likelihoods do we need to be before we can quit calculating?
protected static final Double LOG10_OPTIMIZATION_EPSILON = 8.0;
protected static final Double VALUE_NOT_CALCULATED = -1.0 * Double.MAX_VALUE;
protected static final double LOG10_OPTIMIZATION_EPSILON = 8.0;
protected static final double VALUE_NOT_CALCULATED = -1.0 * Double.MAX_VALUE;
private int minAlleleFrequencyToTest;
// because the null allele frequencies are constant for a given N,
@ -107,11 +107,9 @@ public abstract class JointEstimateGenotypeCalculationModel extends GenotypeCalc
protected void initializeBestAlternateAllele(byte ref, Map<String, StratifiedAlignmentContext> contexts) {
int[] qualCounts = new int[4];
for ( String sample : contexts.keySet() ) {
AlignmentContext context = contexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE);
for ( Map.Entry<String, StratifiedAlignmentContext> sample : contexts.entrySet() ) {
// calculate the sum of quality scores for each base
ReadBackedPileup pileup = context.getBasePileup();
ReadBackedPileup pileup = sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE).getBasePileup();
for ( PileupElement p : pileup ) {
// ignore deletions
if ( p.isDeletion() )
@ -409,7 +407,7 @@ public abstract class JointEstimateGenotypeCalculationModel extends GenotypeCalc
strandScore *= 10.0;
//logger.debug(String.format("SLOD=%f", strandScore));
attributes.put("SB", new Double(strandScore));
attributes.put("SB", Double.valueOf(strandScore));
}
VariantContext vc = new VariantContext("UG_SNP_call", loc, alleles, genotypes, phredScaledConfidence/10.0, passesCallThreshold(phredScaledConfidence) ? null : filter, attributes);

View File

@ -92,8 +92,8 @@ public class SimpleIndelCalculationModel extends GenotypeCalculationModel {
protected void initializeAlleles(byte[] ref, Map<String, StratifiedAlignmentContext> contexts) {
for ( String sample : contexts.keySet() ) {
AlignmentContext context = contexts.get(sample).getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE);
for ( Map.Entry<String, StratifiedAlignmentContext> sample : contexts.entrySet() ) {
AlignmentContext context = sample.getValue().getContext(StratifiedAlignmentContext.StratifiedContextType.COMPLETE);
totalCoverage += context.size();

View File

@ -82,7 +82,7 @@ public class UnifiedGenotyper extends LocusWalker<VariantCallContext, UnifiedGen
/**
* Inner class for collecting output statistics from the UG
*/
public class UGStatistics {
public static class UGStatistics {
/** The total number of passes examined -- i.e., the number of map calls */
long nBasesVisited = 0;

View File

@ -27,6 +27,7 @@ package org.broadinstitute.sting.gatk.walkers.genotyper;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.filters.BadMateFilter;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
@ -155,7 +156,7 @@ public class UnifiedGenotyperEngine {
return null;
VariantCallContext call;
BadlyMatedReadPileupFilter badlyMatedReadPileupFilter = new BadlyMatedReadPileupFilter(refContext);
BadReadPileupFilter badReadPileupFilter = new BadReadPileupFilter(refContext);
if ( rawContext.hasExtendedEventPileup() ) {
@ -165,7 +166,7 @@ public class UnifiedGenotyperEngine {
ReadBackedExtendedEventPileup pileup = rawPileup.getMappingFilteredPileup(UAC.MIN_MAPPING_QUALTY_SCORE);
// filter the context based on bad mates and mismatch rate
pileup = pileup.getFilteredPileup(badlyMatedReadPileupFilter);
pileup = pileup.getFilteredPileup(badReadPileupFilter);
// don't call when there is no coverage
if ( pileup.size() == 0 && !UAC.ALL_BASES_MODE )
@ -186,7 +187,7 @@ public class UnifiedGenotyperEngine {
ReadBackedPileup pileup = rawPileup.getBaseAndMappingFilteredPileup(UAC.MIN_BASE_QUALTY_SCORE, UAC.MIN_MAPPING_QUALTY_SCORE);
// filter the context based on bad mates and mismatch rate
pileup = pileup.getFilteredPileup(badlyMatedReadPileupFilter);
pileup = pileup.getFilteredPileup(badReadPileupFilter);
// don't call when there is no coverage
if ( pileup.size() == 0 && !UAC.ALL_BASES_MODE )
@ -227,16 +228,13 @@ public class UnifiedGenotyperEngine {
/**
* Filters low quality reads out of the pileup.
*/
private class BadlyMatedReadPileupFilter implements PileupElementFilter {
private class BadReadPileupFilter implements PileupElementFilter {
private ReferenceContext refContext;
public BadlyMatedReadPileupFilter(ReferenceContext refContext) { this.refContext = refContext; }
public BadReadPileupFilter(ReferenceContext refContext) { this.refContext = refContext; }
public boolean allow(PileupElement pileupElement) {
return ((UAC.USE_BADLY_MATED_READS ||
!pileupElement.getRead().getReadPairedFlag() ||
pileupElement.getRead().getMateUnmappedFlag() ||
pileupElement.getRead().getMateReferenceIndex() == pileupElement.getRead().getReferenceIndex()) &&
return ((UAC.USE_BADLY_MATED_READS || !BadMateFilter.hasBadMate(pileupElement.getRead())) &&
AlignmentUtils.mismatchesInRefWindow(pileupElement, refContext, true) <= UAC.MAX_MISMATCHES );
}
}

View File

@ -36,6 +36,7 @@ import org.broadinstitute.sting.gatk.refdata.utils.GATKFeature;
import org.broadinstitute.sting.gatk.walkers.ReadWalker;
import org.broadinstitute.sting.gatk.walkers.Reference;
import org.broadinstitute.sting.gatk.walkers.Window;
import org.broadinstitute.sting.gatk.filters.BadMateFilter;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.interval.IntervalFileMergingIterator;
import org.broadinstitute.sting.utils.text.TextFormattingUtils;
@ -298,7 +299,7 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
read.getReadFailsVendorQualityCheckFlag() ||
read.getMappingQuality() == 0 ||
read.getAlignmentStart() == SAMRecord.NO_ALIGNMENT_START ||
(!REALIGN_BADLY_MATED_READS && read.getReadPairedFlag() && !read.getMateUnmappedFlag() && read.getMateReferenceIndex() != read.getReferenceIndex());
(!REALIGN_BADLY_MATED_READS && BadMateFilter.hasBadMate(read));
}
private void cleanAndCallMap(ReferenceContext ref, SAMRecord read, ReadMetaDataTracker metaDataTracker, GenomeLoc readLoc) {
@ -510,7 +511,9 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
statsOutput.write(Double.toString(improvement));
statsOutput.write("\n");
statsOutput.flush();
} catch (Exception e) {}
} catch (Exception e) {
throw new StingException(e.getMessage());
}
}
} else {
//logger.debug("CLEAN: " + bestConsensus.cigar + " " + bestConsensus.str.toString() + " " + bestConsensus.cigar.numCigarElements() );
@ -535,7 +538,9 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
try {
indelOutput.write(str.toString());
indelOutput.flush();
} catch (Exception e) {}
} catch (Exception e) {
throw new StingException(e.getMessage());
}
}
if ( statsOutput != null ) {
try {
@ -547,7 +552,9 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
statsOutput.write(Double.toString(improvement));
statsOutput.write("\n");
statsOutput.flush();
} catch (Exception e) {}
} catch (Exception e) {
throw new StingException(e.getMessage());
}
}
// finish cleaning the appropriate reads
@ -571,7 +578,9 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
statsOutput.write(String.format("%s\tFAIL\t%.1f%n",
readsToClean.getLocation().toString(), improvement));
statsOutput.flush();
} catch (Exception e) {}
} catch (Exception e) {
throw new StingException(e.getMessage());
}
}
}
@ -999,7 +1008,9 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
try {
snpsOutput.write(sb.toString());
snpsOutput.flush();
} catch (Exception e) {}
} catch (Exception e) {
throw new StingException(e.getMessage());
}
}
return reduces;
}
@ -1200,7 +1211,7 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
}
}
private class Consensus {
private static class Consensus {
public final byte[] str;
public final ArrayList<Pair<Integer, Integer>> readIndexes;
public final int positionOnReference;
@ -1230,7 +1241,7 @@ public class IndelRealigner extends ReadWalker<Integer, Integer> {
}
}
private class ReadBin {
private static class ReadBin {
private final ArrayList<SAMRecord> reads = new ArrayList<SAMRecord>();
private byte[] reference = null;

View File

@ -25,12 +25,12 @@
package org.broadinstitute.sting.gatk.walkers.indels;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.filters.Platform454Filter;
import org.broadinstitute.sting.gatk.filters.ZeroMappingQualityReadFilter;
import org.broadinstitute.sting.gatk.filters.BadMateFilter;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.utils.GenomeLoc;
@ -91,7 +91,7 @@ public class RealignerTargetCreator extends RodWalker<RealignerTargetCreator.Eve
long furthestStopPos = -1;
// look for insertions in the extended context (we'll get deletions from the normal context)
if ( context != null && context.hasExtendedEventPileup() ) {
if ( context.hasExtendedEventPileup() ) {
ReadBackedExtendedEventPileup pileup = context.getExtendedEventPileup();
if ( pileup.getNumberOfInsertions() > 0 ) {
hasIndel = hasInsertion = true;
@ -119,6 +119,8 @@ public class RealignerTargetCreator extends RodWalker<RealignerTargetCreator.Eve
if ( vc.isInsertion() )
hasInsertion = true;
break;
default:
break;
}
if ( hasIndel )
furthestStopPos = vc.getLocation().getStop();
@ -132,12 +134,11 @@ public class RealignerTargetCreator extends RodWalker<RealignerTargetCreator.Eve
int mismatchQualities = 0, totalQualities = 0;
byte refBase = ref.getBase();
for (PileupElement p : pileup ) {
SAMRecord read = p.getRead();
if ( !REALIGN_BADLY_MATED_READS && read.getReadPairedFlag() && !read.getMateUnmappedFlag() && read.getMateReferenceIndex() != read.getReferenceIndex() )
if ( !REALIGN_BADLY_MATED_READS && BadMateFilter.hasBadMate(p.getRead()) )
continue;
// check the ends of the reads to see how far they extend
furthestStopPos = Math.max(furthestStopPos, read.getAlignmentEnd());
furthestStopPos = Math.max(furthestStopPos, p.getRead().getAlignmentEnd());
// is it a deletion? (sanity check in case extended event missed it)
if ( p.isDeletion() ) {