Quick pass of FindBugs 'inefficient use of keySet iterator instead of entrySet iterator' fixes for core tools.

This commit is contained in:
Eric Banks 2012-08-08 14:29:41 -04:00
parent 3e2752667c
commit 4b2e3cec0b
12 changed files with 45 additions and 45 deletions

View File

@@ -332,11 +332,11 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
final Map<String, AlignmentContext> splitContexts = AlignmentContextUtils.splitContextBySampleName(context);
final GenotypesContext genotypes = GenotypesContext.create(splitContexts.keySet().size());
final MathUtils.RunningAverage averageHQSoftClips = new MathUtils.RunningAverage();
for( final String sample : splitContexts.keySet() ) {
for( Map.Entry<String, AlignmentContext> sample : splitContexts.entrySet() ) {
final double[] genotypeLikelihoods = new double[3]; // ref versus non-ref (any event)
Arrays.fill(genotypeLikelihoods, 0.0);
for( final PileupElement p : splitContexts.get(sample).getBasePileup() ) {
for( final PileupElement p : sample.getValue().getBasePileup() ) {
final byte qual = ( USE_EXPANDED_TRIGGER_SET ?
( p.isNextToSoftClip() || p.isBeforeInsertion() || p.isAfterInsertion() ? ( p.getQual() > QualityUtils.MIN_USABLE_Q_SCORE ? p.getQual() : (byte) 20 ) : p.getQual() )
: p.getQual() );
@@ -362,7 +362,7 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
genotypeLikelihoods[BB] += p.getRepresentativeCount() * QualityUtils.qualToErrorProbLog10(qual) + LOG_ONE_THIRD;
}
}
genotypes.add( new GenotypeBuilder(sample).alleles(noCall).PL(genotypeLikelihoods).make() );
genotypes.add( new GenotypeBuilder(sample.getKey()).alleles(noCall).PL(genotypeLikelihoods).make() );
}
final ArrayList<Allele> alleles = new ArrayList<Allele>();

View File

@@ -53,8 +53,8 @@ public class LikelihoodCalculationEngine {
public void computeReadLikelihoods( final ArrayList<Haplotype> haplotypes, final HashMap<String, ArrayList<GATKSAMRecord>> perSampleReadList ) {
int X_METRIC_LENGTH = 0;
for( final String sample : perSampleReadList.keySet() ) {
for( final GATKSAMRecord read : perSampleReadList.get(sample) ) {
for( final Map.Entry<String, ArrayList<GATKSAMRecord>> sample : perSampleReadList.entrySet() ) {
for( final GATKSAMRecord read : sample.getValue() ) {
final int readLength = read.getReadLength();
if( readLength > X_METRIC_LENGTH ) { X_METRIC_LENGTH = readLength; }
}
@@ -326,9 +326,9 @@ public class LikelihoodCalculationEngine {
public static Map<String, Map<Allele, List<GATKSAMRecord>>> partitionReadsBasedOnLikelihoods( final GenomeLocParser parser, final HashMap<String, ArrayList<GATKSAMRecord>> perSampleReadList, final HashMap<String, ArrayList<GATKSAMRecord>> perSampleFilteredReadList, final Pair<VariantContext, HashMap<Allele,ArrayList<Haplotype>>> call) {
final Map<String, Map<Allele, List<GATKSAMRecord>>> returnMap = new HashMap<String, Map<Allele, List<GATKSAMRecord>>>();
final GenomeLoc callLoc = parser.createGenomeLoc(call.getFirst());
for( final String sample : perSampleReadList.keySet() ) {
for( final Map.Entry<String, ArrayList<GATKSAMRecord>> sample : perSampleReadList.entrySet() ) {
final Map<Allele, List<GATKSAMRecord>> alleleReadMap = new HashMap<Allele, List<GATKSAMRecord>>();
final ArrayList<GATKSAMRecord> readsForThisSample = perSampleReadList.get(sample);
final ArrayList<GATKSAMRecord> readsForThisSample = sample.getValue();
for( int iii = 0; iii < readsForThisSample.size(); iii++ ) {
final GATKSAMRecord read = readsForThisSample.get(iii); // BUGBUG: assumes read order in this list and haplotype likelihood list are the same!
// only count the read if it overlaps the event, otherwise it is not added to the output read list at all
@@ -338,7 +338,7 @@ public class LikelihoodCalculationEngine {
for( final Allele a : call.getFirst().getAlleles() ) { // find the allele with the highest haplotype likelihood
double maxLikelihood = Double.NEGATIVE_INFINITY;
for( final Haplotype h : call.getSecond().get(a) ) { // use the max likelihood from all the haplotypes which mapped to this allele (achieved via the haplotype mapper object)
final double likelihood = h.getReadLikelihoods(sample)[iii];
final double likelihood = h.getReadLikelihoods(sample.getKey())[iii];
if( likelihood > maxLikelihood ) {
maxLikelihood = likelihood;
}
@@ -373,13 +373,13 @@ public class LikelihoodCalculationEngine {
readList = new ArrayList<GATKSAMRecord>();
alleleReadMap.put(Allele.NO_CALL, readList);
}
for( final GATKSAMRecord read : perSampleFilteredReadList.get(sample) ) {
for( final GATKSAMRecord read : perSampleFilteredReadList.get(sample.getKey()) ) {
// only count the read if it overlaps the event, otherwise it is not added to the output read list at all
if( callLoc.overlapsP(parser.createGenomeLoc(read)) ) {
readList.add(read);
}
}
returnMap.put(sample, alleleReadMap);
returnMap.put(sample.getKey(), alleleReadMap);
}
return returnMap;
}

View File

@@ -65,12 +65,12 @@ public class BaseQualityRankSumTest extends RankSumTest implements StandardAnnot
// by design, first element in LinkedHashMap was ref allele
double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
for (Allele a : el.keySet()) {
for (Map.Entry<Allele, Double> entry : el.entrySet()) {
if (a.isReference())
refLikelihood =el.get(a);
if (entry.getKey().isReference())
refLikelihood = entry.getValue();
else {
double like = el.get(a);
double like = entry.getValue();
if (like >= altLikelihood)
altLikelihood = like;
}

View File

@@ -291,8 +291,8 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
int[][] table = new int[2][2];
for ( String sample : stratifiedContexts.keySet() ) {
final AlignmentContext context = stratifiedContexts.get(sample);
for ( Map.Entry<String, AlignmentContext> sample : stratifiedContexts.entrySet() ) {
final AlignmentContext context = sample.getValue();
if ( context == null )
continue;
@@ -313,12 +313,12 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
for (Allele a : el.keySet()) {
for (Map.Entry<Allele,Double> entry : el.entrySet()) {
if (a.isReference())
refLikelihood =el.get(a);
if (entry.getKey().isReference())
refLikelihood = entry.getValue();
else {
double like = el.get(a);
double like = entry.getValue();
if (like >= altLikelihood)
altLikelihood = like;
}

View File

@@ -362,8 +362,8 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
// Score all the reads in the pileup, even the filtered ones
final double[] scores = new double[el.size()];
int i = 0;
for (Allele a : el.keySet()) {
scores[i++] = -el.get(a);
for (Map.Entry<Allele, Double> a : el.entrySet()) {
scores[i++] = -a.getValue();
if (DEBUG) {
System.out.printf(" vs. haplotype %d = %f%n", i - 1, scores[i - 1]);
}

View File

@@ -61,12 +61,12 @@ public class MappingQualityRankSumTest extends RankSumTest implements StandardAn
// by design, first element in LinkedHashMap was ref allele
double refLikelihood=0.0, altLikelihood=Double.NEGATIVE_INFINITY;
for (Allele a : el.keySet()) {
for (Map.Entry<Allele,Double> a : el.entrySet()) {
if (a.isReference())
refLikelihood =el.get(a);
if (a.getKey().isReference())
refLikelihood = a.getValue();
else {
double like = el.get(a);
double like = a.getValue();
if (like >= altLikelihood)
altLikelihood = like;
}

View File

@@ -87,11 +87,11 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio
LinkedHashMap<Allele, Double> el = indelLikelihoodMap.get(p); // retrieve likelihood information corresponding to this read
double refLikelihood = 0.0, altLikelihood = Double.NEGATIVE_INFINITY; // by design, first element in LinkedHashMap was ref allele
for (Allele a : el.keySet()) {
if (a.isReference())
refLikelihood = el.get(a);
for (Map.Entry<Allele,Double> a : el.entrySet()) {
if (a.getKey().isReference())
refLikelihood = a.getValue();
else {
double like = el.get(a);
double like = a.getValue();
if (like >= altLikelihood)
altLikelihood = like;
}
@@ -100,7 +100,6 @@ public class ReadPosRankSumTest extends RankSumTest implements StandardAnnotatio
int readPos = getOffsetFromClippedReadStart(p.getRead(), p.getOffset());
final int numAlignedBases = getNumAlignedBases(p.getRead());
int rp = readPos;
if (readPos > numAlignedBases / 2) {
readPos = numAlignedBases - (readPos + 1);
}

View File

@@ -148,8 +148,8 @@ public class ConsensusAlleleCounter {
boolean foundKey = false;
// copy of hashmap into temp arrayList
ArrayList<Pair<String,Integer>> cList = new ArrayList<Pair<String,Integer>>();
for (String s : consensusIndelStrings.keySet()) {
cList.add(new Pair<String, Integer>(s,consensusIndelStrings.get(s)));
for (Map.Entry<String, Integer> s : consensusIndelStrings.entrySet()) {
cList.add(new Pair<String, Integer>(s.getKey(), s.getValue()));
}
if (read.getAlignmentEnd() == loc.getStart()) {

View File

@@ -35,6 +35,7 @@ import org.broadinstitute.sting.utils.variantcontext.Allele;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
public class HaplotypeIndelErrorModel {
@@ -427,8 +428,8 @@ public class HaplotypeIndelErrorModel {
// for each read/haplotype combination, compute likelihoods, ie -10*log10(Pr(R | Hi))
// = sum_j(-10*log10(Pr(R_j | Hi) since reads are assumed to be independent
int j=0;
for (Allele a: haplotypesInVC.keySet()) {
readLikelihoods[i][j]= computeReadLikelihoodGivenHaplotype(haplotypesInVC.get(a), read);
for (Map.Entry<Allele,Haplotype> a: haplotypesInVC.entrySet()) {
readLikelihoods[i][j]= computeReadLikelihoodGivenHaplotype(a.getValue(), read);
if (DEBUG) {
System.out.print(read.getReadName()+" ");

View File

@@ -426,10 +426,10 @@ public class PhaseByTransmission extends RodWalker<HashMap<Byte,Integer>, HashMa
Map<String,Set<Sample>> families = this.getSampleDB().getFamilies();
Set<Sample> family;
ArrayList<Sample> parents;
for(String familyID : families.keySet()){
family = families.get(familyID);
for(Map.Entry<String,Set<Sample>> familyEntry : families.entrySet()){
family = familyEntry.getValue();
if(family.size()<2 || family.size()>3){
logger.info(String.format("Caution: Family %s has %d members; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyID,family.size()));
logger.info(String.format("Caution: Family %s has %d members; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyEntry.getKey(),family.size()));
}
else{
for(Sample familyMember : family){
@@ -438,7 +438,7 @@ public class PhaseByTransmission extends RodWalker<HashMap<Byte,Integer>, HashMa
if(family.containsAll(parents))
this.trios.add(familyMember);
else
logger.info(String.format("Caution: Family %s skipped as it is not a trio nor a parent/child pair; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyID));
logger.info(String.format("Caution: Family %s skipped as it is not a trio nor a parent/child pair; At the moment Phase By Transmission only supports trios and parent/child pairs. Family skipped.",familyEntry.getKey()));
break;
}
}

View File

@@ -183,13 +183,13 @@ public class VariantEvalReportWriter {
throw new ReviewedStingException("Datamap is empty for analysis " + scanner.getAnalysis());
// add DataPoint's for each field marked as such
for (final Field field : datamap.keySet()) {
for (final Map.Entry<Field, DataPoint> field : datamap.entrySet()) {
try {
field.setAccessible(true);
field.getKey().setAccessible(true);
// this is an atomic value, add a column for it
final String format = datamap.get(field).format();
table.addColumn(field.getName(), format);
final String format = field.getValue().format();
table.addColumn(field.getKey().getName(), format);
} catch (SecurityException e) {
throw new StingException("SecurityException: " + e);
}

View File

@@ -681,8 +681,8 @@ public class IntervalUtils {
LinkedHashMap<String, List<GenomeLoc>> locsByContig = splitByContig(sorted);
List<GenomeLoc> expanded = new ArrayList<GenomeLoc>();
for (String contig: locsByContig.keySet()) {
List<GenomeLoc> contigLocs = locsByContig.get(contig);
for (Map.Entry<String, List<GenomeLoc>> contig: locsByContig.entrySet()) {
List<GenomeLoc> contigLocs = contig.getValue();
int contigLocsSize = contigLocs.size();
GenomeLoc startLoc, stopLoc;