FindBugs fixes throughout the code base

git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@3823 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
depristo 2010-07-18 16:29:59 +00:00
parent 693672a461
commit 7c42e6994f
21 changed files with 78 additions and 69 deletions

View File

@ -123,6 +123,9 @@ public class GenomeAnalysisEngine {
*/
public GenomeAnalysisEngine() {
// make sure our instance variable points to this analysis engine
// if ( instance != null )
// throw new StingException("Instantiating GenomeAnalysisEngine but global instance variable isn't null, indicating that an instance has already been created: " + instance);
instance = this;
walkerManager = new WalkerManager();
filterManager = new FilterManager();
@ -736,7 +739,7 @@ public class GenomeAnalysisEngine {
Shard.ShardType shardType;
if(walker instanceof LocusWalker) {
if(readsDataSource != null && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate)
Utils.scareUser("Locus walkers can only walk over coordinate-sorted data. Please resort your input BAM file.");
shardType = Shard.ShardType.LOCUS;
}

View File

@ -23,6 +23,7 @@
package org.broadinstitute.sting.gatk.contexts.variantcontext;
import java.io.Serializable;
import java.util.*;
import org.apache.commons.jexl2.*;
import org.broadinstitute.sting.utils.StingException;
@ -32,7 +33,7 @@ import org.broadinstitute.sting.utils.genotype.HardyWeinbergCalculation;
import org.broad.tribble.vcf.VCFConstants;
public class VariantContextUtils {
public static JexlEngine engine = new JexlEngine();
final public static JexlEngine engine = new JexlEngine();
/**
* A simple but common wrapper for matching VariantContext objects using JEXL expressions
@ -64,10 +65,10 @@ public class VariantContextUtils {
*/
public static List<JexlVCMatchExp> initializeMatchExps(String[] names, String[] exps) {
if ( names == null || exps == null )
throw new StingException("BUG: neither names nor exps can be null: names " + names + " exps=" + exps );
throw new StingException("BUG: neither names nor exps can be null: names " + Arrays.toString(names) + " exps=" + Arrays.toString(exps) );
if ( names.length != exps.length )
throw new StingException("Inconsistent number of provided filter names and expressions: names=" + names + " exps=" + exps);
throw new StingException("Inconsistent number of provided filter names and expressions: names=" + Arrays.toString(names) + " exps=" + Arrays.toString(exps));
Map<String, String> map = new HashMap<String, String>();
for ( int i = 0; i < names.length; i++ ) { map.put(names[i], exps[i]); }
@ -377,7 +378,7 @@ public class VariantContextUtils {
}
static class CompareByPriority implements Comparator<VariantContext> {
static class CompareByPriority implements Comparator<VariantContext>, Serializable {
List<String> priorityListOfVCs;
public CompareByPriority(List<String> priorityListOfVCs) {
this.priorityListOfVCs = priorityListOfVCs;
@ -390,7 +391,7 @@ public class VariantContextUtils {
}
public int compare(VariantContext vc1, VariantContext vc2) {
return new Integer(getIndex(vc1)).compareTo(getIndex(vc2));
return Integer.valueOf(getIndex(vc1)).compareTo(getIndex(vc2));
}
}

View File

@ -35,6 +35,7 @@ import java.util.*;
/**
*
* @author aaron
* @author depristo
*
* Class VariantJEXLContext
*
@ -42,12 +43,13 @@ import java.util.*;
* having to generate a JEXL context lookup map every time we want to evaluate an expression.
*
* This is package protected, only classes in variantcontext should have access to it.
*
* // todo -- clean up to remove or better support genotype filtering
*/
class VariantJEXLContext implements JexlContext {
// our stored variant context
private VariantContext vc;
private Genotype g;
private interface AttributeGetter {
public Object get(VariantContext vc);
@ -71,16 +73,14 @@ class VariantJEXLContext implements JexlContext {
}
public VariantJEXLContext(VariantContext vc) {
this(vc, null);
}
public VariantJEXLContext(VariantContext vc, Genotype g) {
this.vc = vc;
this.g = g;
//throw new UnsupportedOperationException("Cannot instantiate VariantJEXLContext");
}
// public VariantJEXLContext(VariantContext vc, Genotype g) {
// this.vc = vc;
// //throw new UnsupportedOperationException("Cannot instantiate VariantJEXLContext");
// }
public Object get(String name) {
Object result = null;
if ( x.containsKey(name) ) { // dynamic resolution of name -> value via map
@ -155,12 +155,11 @@ class JEXLMap implements Map<VariantContextUtils.JexlVCMatchExp, Boolean> {
* should get added.
*
*/
private static final boolean USE_VCONTEXT = true;
private void createContext() {
if ( USE_VCONTEXT && g == null ) {
jContext = new VariantJEXLContext(vc, g);
if ( g == null ) {
// todo -- remove the dependency on g from the entire system
jContext = new VariantJEXLContext(vc);
} else {
Map<String, Object> infoMap = new HashMap<String, Object>();
if ( vc != null ) {
@ -206,9 +205,6 @@ class JEXLMap implements Map<VariantContextUtils.JexlVCMatchExp, Boolean> {
// create the internal context that we can evaluate expressions against
jContext = new MapContext(infoMap);
// jContext = JexlHelper.createContext();
// jContext.setVars(infoMap);
}
}

View File

@ -63,8 +63,7 @@ import java.util.regex.Pattern;
* To change this template use File | Settings | File Templates.
*/
public class PlatformUnitFilterHelper {
public static Pattern EMPTYLINE_PATTERN = Pattern.compile("^\\s*$");
final public static Pattern EMPTYLINE_PATTERN = Pattern.compile("^\\s*$");
public PlatformUnitFilterHelper(String arg) {
File f = new File(arg);

View File

@ -40,8 +40,8 @@ import java.util.*;
/** Iterator that traverses a SAM File, accumulating information on a per-locus basis */
public class LocusIteratorByState extends LocusIterator {
private static long discarded_bases = 0L;
private static long observed_bases = 0L;
// private static long discarded_bases = 0L;
// private static long observed_bases = 0L;
//
// todo -- eric, add your UG filters here
@ -69,7 +69,7 @@ public class LocusIteratorByState extends LocusIterator {
private final Collection<String> sampleNames = new ArrayList<String>();
private final ReadStateManager readStates;
private class SAMRecordState {
static private class SAMRecordState {
SAMRecord read;
int readOffset = -1; // how far are we offset from the start of the read bases?
int genomeOffset = -1; // how far are we offset from the alignment start on the genome?
@ -226,7 +226,11 @@ public class LocusIteratorByState extends LocusIterator {
insertedBases = null;
// System.out.println("Deleted "+eventLength +" bases after "+readOffset);
}
} // continue onto the 'N' case !
}
// should be the same as N case
genomeOffset++;
done = true;
break;
case N : // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
genomeOffset++;
done = true;
@ -448,11 +452,11 @@ public class LocusIteratorByState extends LocusIterator {
SAMRecordState state = iterator.next();
if ( state.getCurrentCigarOperator() != CigarOperator.D && state.getCurrentCigarOperator() != CigarOperator.N ) {
if ( filterRead(state.getRead(), location.getStart(), filters ) ) {
discarded_bases++;
//discarded_bases++;
//printStatus("Adaptor bases", discarded_adaptor_bases);
continue;
} else {
observed_bases++;
//observed_bases++;
pile.add(new PileupElement(state.getRead(), state.getReadOffset()));
size++;
}
@ -504,10 +508,10 @@ public class LocusIteratorByState extends LocusIterator {
return false;
}
private void printStatus(final String title, long n) {
if ( n % 10000 == 0 )
System.out.printf("%s %d / %d = %.2f%n", title, n, observed_bases, 100.0 * n / (observed_bases + 1));
}
// private void printStatus(final String title, long n) {
// if ( n % 10000 == 0 )
// System.out.printf("%s %d / %d = %.2f%n", title, n, observed_bases, 100.0 * n / (observed_bases + 1));
// }
private void updateReadStates() {
for(String sampleName: sampleNames) {
@ -865,7 +869,7 @@ public class LocusIteratorByState extends LocusIterator {
/**
* Note: assuming that, whenever we downsample, we downsample to an integer capacity.
*/
private class Counter {
static private class Counter {
private int count;
public Counter(int count) {

View File

@ -20,7 +20,7 @@ public class ReadFormattingIterator implements StingSAMIterator {
/**
* Logger.
*/
protected static Logger logger = Logger.getLogger(ReadFormattingIterator.class);
final protected static Logger logger = Logger.getLogger(ReadFormattingIterator.class);
/**
* Iterator to which to pass

View File

@ -19,7 +19,6 @@ public class VerifyingSamIterator implements StingSAMIterator {
StingSAMIterator it;
SAMRecord last = null;
boolean checkOrderP = true;
long nOutOfOrderReads = 0;
public VerifyingSamIterator(StingSAMIterator it) {
this.it = it;

View File

@ -36,6 +36,7 @@ import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.gatk.io.StingSAMFileWriter;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import java.nio.channels.IllegalSelectorException;
import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
@ -336,6 +337,8 @@ public class ClipReadsWalker extends ReadWalker<ClipReadsWalker.ReadClipper, Cli
case MATCHES_CLIP_SEQ:
data.incSeqClippedBases((String) op.extraInfo, op.getLength());
break;
default:
throw new IllegalStateException("Unexpected Clipping operator type " + op);
}
}
}
@ -353,7 +356,7 @@ public class ClipReadsWalker extends ReadWalker<ClipReadsWalker.ReadClipper, Cli
//
// --------------------------------------------------------------------------------------------------------------
private class SeqToClip {
private static class SeqToClip {
String name;
String seq, revSeq;
Pattern fwdPat, revPat;
@ -475,6 +478,8 @@ public class ClipReadsWalker extends ReadWalker<ClipReadsWalker.ReadClipper, Cli
break;
//throw new RuntimeException("Softclipping of bases not yet implemented.");
default:
throw new IllegalStateException("Unexpected Clipping operator type " + algorithm);
}
}
}
@ -492,7 +497,7 @@ public class ClipReadsWalker extends ReadWalker<ClipReadsWalker.ReadClipper, Cli
/**
* A simple collection of the clipping operations to apply to a read along with its read
*/
public class ReadClipper {
static public class ReadClipper {
SAMRecord read;
List<ClippingOp> ops = null;
@ -551,7 +556,7 @@ public class ClipReadsWalker extends ReadWalker<ClipReadsWalker.ReadClipper, Cli
}
}
public class ClippingData {
public static class ClippingData {
public long nTotalReads = 0;
public long nTotalBases = 0;
public long nClippedReads = 0;

View File

@ -42,7 +42,7 @@ import java.text.NumberFormat;
@Requires({DataSource.READS})
public class FlagStatWalker extends ReadWalker<Integer, Integer> {
// what comes out of the flagstat
class FlagStat {
static class FlagStat {
int readCount = 0;
int QC_failure = 0;
int duplicates = 0;
@ -144,7 +144,7 @@ public class FlagStatWalker extends ReadWalker<Integer, Integer> {
myStat.singletons++;
}
}
if (read.getReferenceIndex() >= 0 && read.getMateReferenceIndex() >= 0 && read.getReferenceIndex() != read.getMateReferenceIndex()) {
if (read.getReferenceIndex() >= 0 && read.getMateReferenceIndex() >= 0 && ! read.getReferenceIndex().equals(read.getMateReferenceIndex())) {
myStat.with_mate_mapped_to_a_different_chr++;
if (read.getMappingQuality() >= 5) {

View File

@ -43,7 +43,7 @@ import net.sf.picard.filter.SamRecordFilter;
* To change this template use File | Settings | File Templates.
*/
public abstract class Walker<MapType, ReduceType> {
protected static Logger logger = Logger.getLogger(Walker.class);
final protected static Logger logger = Logger.getLogger(Walker.class);
/**
* A stream for writing normal (non-error) output. System.out by default.

View File

@ -91,7 +91,7 @@ public class CallableLociWalker extends LocusWalker<CallableLociWalker.CallableB
}
}
public class Integrator {
public static class Integrator {
long counts[] = new long[CalledState.values().length];
CallableBaseState state = null;
}

View File

@ -138,8 +138,7 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<CoverageAggregator.Ag
}
if ( ! goodOutputFormat ) {
System.out.println("Improper output format. Can be one of table,rtable,csv. Was "+outputFormat);
System.exit(0);
throw new IllegalArgumentException("Improper output format. Can be one of table,rtable,csv. Was "+outputFormat);
}
if ( outputFormat.equals("csv") ) {
@ -604,10 +603,11 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<CoverageAggregator.Ag
hBuilder.append(String.format("from_%d_to_inf%n",leftEnds[leftEnds.length-1]));
output.print(hBuilder.toString());
Map<String,int[]> histograms = stats.getHistograms();
for ( String s : histograms.keySet() ) {
for ( Map.Entry<String, int[]> p : histograms.entrySet() ) {
StringBuilder sBuilder = new StringBuilder();
sBuilder.append(String.format("sample_%s",s));
for ( int count : histograms.get(s) ) {
sBuilder.append(String.format("sample_%s",p.getKey()));
for ( int count : p.getValue() ) {
sBuilder.append(String.format("%s%d",separator,count));
}
sBuilder.append(String.format("%n"));
@ -698,8 +698,9 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<CoverageAggregator.Ag
Map<String,Long> totals = stats.getTotals();
int[] leftEnds = stats.getEndpoints();
for ( String s : histograms.keySet() ) {
int[] histogram = histograms.get(s);
for ( Map.Entry<String, int[]> p : histograms.entrySet() ) {
String s = p.getKey();
int[] histogram = p.getValue();
int median = getQuantile(histogram,0.5);
int q1 = getQuantile(histogram,0.25);
int q3 = getQuantile(histogram,0.75);
@ -821,6 +822,8 @@ public class DepthOfCoverageWalker extends LocusWalker<Map<CoverageAggregator.Ag
for (CoverageAggregator.AggregationType t : aggregationTypes ) {
List<String> order = orderCheck.get(t);
List<String> namesInAg = ag.getIdentifiersByType().get(t);
// todo -- chris check me
Set<String> namesInDOCS = ag.getCoverageByAggregationType(t).getAllSamples();
int index = 0;
for ( String s : namesInAg ) {

View File

@ -66,19 +66,19 @@ public class FastaReferenceWalker extends RefWalker<Pair<GenomeLoc, String>, Gen
// if there is no interval to the left, then this is the first one
if ( sum == null ) {
sum = value.first;
fasta.append(value.second.toString());
fasta.append(value.second);
}
// if the intervals don't overlap, print out the leftmost one and start a new one
// (end of contig or new interval)
else if ( value.first.getStart() != sum.getStop() + 1 ) {
fasta.flush();
sum = value.first;
fasta.append(value.second.toString());
fasta.append(value.second);
}
// otherwise, merge them
else {
sum = GenomeLocParser.setStop(sum, value.first.getStop());
fasta.append(value.second.toString());
fasta.append(value.second);
}
return sum;
}

View File

@ -45,16 +45,12 @@ import org.broadinstitute.sting.utils.Utils;
*/
public class CycleCovariate implements StandardCovariate {
private static boolean warnedUserBadPlatform = false;
private static String defaultPlatform = null;
// Initialize any member variables using the command-line arguments passed to the walkers
public void initialize( final RecalibrationArgumentCollection RAC ) {
if( RAC.DEFAULT_PLATFORM != null ) {
if( RAC.DEFAULT_PLATFORM.equalsIgnoreCase( "SLX" ) || RAC.DEFAULT_PLATFORM.equalsIgnoreCase( "ILLUMINA" ) ||
RAC.DEFAULT_PLATFORM.contains( "454" ) || RAC.DEFAULT_PLATFORM.equalsIgnoreCase( "SOLID" ) || RAC.DEFAULT_PLATFORM.equalsIgnoreCase( "ABI_SOLID" ) ) {
defaultPlatform = RAC.DEFAULT_PLATFORM;
// nothing to do
} else {
throw new StingException( "The requested default platform (" + RAC.DEFAULT_PLATFORM +") is not a recognized platform. Implemented options are illumina, 454, and solid");
}

View File

@ -168,7 +168,7 @@ public class RecalDataManager {
}
private void checkForSingletons( final Map data ) {
// todo -- this looks like it would be better as a data.values() call?
for( Object comp : data.keySet() ) {
final Object val = data.get(comp);
if( val instanceof RecalDatum ) { // We are at the end of the nested hash maps

View File

@ -111,6 +111,8 @@ public class CountVariants extends VariantEvaluator {
case MIXED:
nComplex++;
break;
default:
throw new StingException("Unexpected VariantContext type " + vc1.getType());
}
for (Genotype g : vc1.getGenotypes().values()) {

View File

@ -59,7 +59,7 @@ public class GenotypeConcordance extends VariantEvaluator {
private static final int MAX_MISSED_VALIDATION_DATA = 100;
class FrequencyStats implements TableType {
static class FrequencyStats implements TableType {
class Stats {
public Stats(int found, int missed) { nFound = found; nMissed = missed; }
public long nFound = 0;
@ -103,8 +103,8 @@ public class GenotypeConcordance extends VariantEvaluator {
}
}
class QualityScoreHistograms implements TableType {
final int NUM_BINS = 20;
static class QualityScoreHistograms implements TableType {
final static int NUM_BINS = 20;
final HashMap<Integer,Integer> truePositiveQualityScoreMap = new HashMap<Integer,Integer>(); // A HashMap holds all the quality scores until we are able to bin them appropriately
final HashMap<Integer,Integer> falsePositiveQualityScoreMap = new HashMap<Integer,Integer>();
final int truePositiveHist[] = new int[NUM_BINS]; // the final histograms that get reported out

View File

@ -17,7 +17,7 @@ import org.broadinstitute.sting.utils.StingException;
*/
@Analysis(name = "Indel length histograms", description = "Shows the distrbution of insertion/deletion event lengths (negative for deletion, positive for insertion)")
public class IndelLengthHistogram extends VariantEvaluator {
private final int SIZE_LIMIT = 50;
private static final int SIZE_LIMIT = 50;
@DataPoint(name="indelLengthHistogram",description="Histogram of indel lengths")
IndelHistogram indelHistogram = new IndelHistogram(SIZE_LIMIT);
@ -25,7 +25,7 @@ public class IndelLengthHistogram extends VariantEvaluator {
* Indel length histogram table object
*/
class IndelHistogram implements TableType {
static class IndelHistogram implements TableType {
private Integer[] colKeys;
private int limit;
private String[] rowKeys = {"EventLength"};

View File

@ -91,7 +91,7 @@ public class CombineVariants extends RodWalker<Integer, Integer> {
if ( rodNames.size() != priority.size() )
throw new StingException("The priority list must contain exactly one rod binding per ROD provided to the GATK: rodNames=" + rodNames + " priority=" + priority);
if ( ! rodNames.containsAll(rodNames) )
if ( ! rodNames.containsAll(priority) )
throw new StingException("Not all priority elements provided as input RODs: " + PRIORITY_STRING);
}

View File

@ -133,14 +133,15 @@ public class LocusMismatchWalker extends LocusWalker<String,Integer> implements
}
if ( nMismatches < maxNumMismatches && nMismatches >= minMismatches && usableDepth >= minDepth ) {
String baseCountString = "";
StringBuffer baseCountString = new StringBuffer();
for ( byte b : BaseUtils.BASES ) {
baseCountString += baseCounts[BaseUtils.simpleBaseToBaseIndex(b)] + " ";
baseCountString.append(baseCounts[BaseUtils.simpleBaseToBaseIndex(b)]);
baseCountString.append(" ");
}
return String.format("%s %c %10s %5.2f %d %d %d %s",
pileup.getLocation(), ref.getBaseAsChar(),
getGenotypeClass(g), 10 * g.getNegLog10PError(),
usableDepth, nMismatches, qSumMismatches, baseCountString);
usableDepth, nMismatches, qSumMismatches, baseCountString.toString());
}
return null;

View File

@ -230,7 +230,7 @@ public class HybSelPerformanceWalker extends LocusWalker<Integer, HybSelPerforma
basesConsidered += length;
}
}
double meanTargetCoverage = totalCoverage / basesConsidered;
double meanTargetCoverage = (1.0*totalCoverage) / basesConsidered;
for(Pair<GenomeLoc, TargetInfo> pair : results) {