Bug fix in LikelihoodCalculationEngine: Mapping quality was being cast to a byte and overflowing for reads with large mapping quality scores.

This commit is contained in:
Ryan Poplin 2013-02-21 12:53:13 -05:00
parent 09b444de26
commit 62e14f5b58
2 changed files with 2 additions and 2 deletions

View File

@@ -427,7 +427,7 @@ public class DiploidSNPGenotypeLikelihoods implements Cloneable {
if ( qual > SAMUtils.MAX_PHRED_SCORE )
throw new UserException.MisencodedBAM(p.getRead(), "we encountered an extremely high quality score (" + (int)qual + ")");
if ( capBaseQualsAtMappingQual )
-qual = (byte)Math.min((int)qual, p.getMappingQual());
+qual = (byte) Math.min( 0xff & qual, p.getMappingQual());
if ( (int)qual < minBaseQual )
qual = (byte)0;

View File

@@ -137,7 +137,7 @@ public class LikelihoodCalculationEngine {
final byte[] readInsQuals = read.getBaseInsertionQualities();
final byte[] readDelQuals = read.getBaseDeletionQualities();
for( int kkk = 0; kkk < readQuals.length; kkk++ ) {
-readQuals[kkk] = ( readQuals[kkk] > (byte) read.getMappingQuality() ? (byte) read.getMappingQuality() : readQuals[kkk] ); // cap base quality by mapping quality
+readQuals[kkk] = (byte) Math.min( 0xff & readQuals[kkk], read.getMappingQuality()); // cap base quality by mapping quality, as in UG
//readQuals[kkk] = ( readQuals[kkk] > readInsQuals[kkk] ? readInsQuals[kkk] : readQuals[kkk] ); // cap base quality by base insertion quality, needs to be evaluated
//readQuals[kkk] = ( readQuals[kkk] > readDelQuals[kkk] ? readDelQuals[kkk] : readQuals[kkk] ); // cap base quality by base deletion quality, needs to be evaluated
// TODO -- why is Q18 hard-coded here???