Refactored PairHMM.initialize to first take haplotype max length and then the read max length so that it is consistent with other PairHMM methods.
This commit is contained in:
parent
bd9875aff5
commit
7519484a38
|
|
@@ -109,7 +109,7 @@ public class LikelihoodCalculationEngine {
|
|||
Y_METRIC_LENGTH += 2;
|
||||
|
||||
// initialize arrays to hold the probabilities of being in the match, insertion and deletion cases
|
||||
pairHMM.initialize(X_METRIC_LENGTH, Y_METRIC_LENGTH);
|
||||
pairHMM.initialize(Y_METRIC_LENGTH, X_METRIC_LENGTH);
|
||||
|
||||
// for each sample's reads
|
||||
for( final Map.Entry<String, List<GATKSAMRecord>> sampleEntry : perSampleReadList.entrySet() ) {
|
||||
|
|
|
|||
|
|
@@ -385,7 +385,7 @@ public class PairHMMIndelErrorModel {
|
|||
|
||||
if (previousHaplotypeSeen == null) {
|
||||
//no need to reallocate arrays for each new haplotype, as length won't change
|
||||
pairHMM.initialize(X_METRIC_LENGTH, Y_METRIC_LENGTH);
|
||||
pairHMM.initialize(Y_METRIC_LENGTH, X_METRIC_LENGTH);
|
||||
}
|
||||
|
||||
int startIndexInHaplotype = 0;
|
||||
|
|
|
|||
|
|
@@ -78,8 +78,8 @@ public class LoglessCachingPairHMM extends PairHMM {
|
|||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public void initialize( final int readMaxLength, final int haplotypeMaxLength) {
|
||||
super.initialize(readMaxLength, haplotypeMaxLength);
|
||||
public void initialize( final int haplotypeMaxLength, final int readMaxLength) {
|
||||
super.initialize(haplotypeMaxLength, readMaxLength);
|
||||
|
||||
constantMatrix = new double[X_METRIC_MAX_LENGTH][6];
|
||||
distanceMatrix = new double[X_METRIC_MAX_LENGTH][Y_METRIC_MAX_LENGTH];
|
||||
|
|
|
|||
|
|
@@ -136,7 +136,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
}
|
||||
|
||||
public double calcLogL( final PairHMM pairHMM, boolean anchorIndel ) {
|
||||
pairHMM.initialize(readBasesWithContext.length, refBasesWithContext.length);
|
||||
pairHMM.initialize(refBasesWithContext.length, readBasesWithContext.length);
|
||||
return pairHMM.computeReadLikelihoodGivenHaplotypeLog10(
|
||||
refBasesWithContext, readBasesWithContext,
|
||||
qualAsBytes(baseQual, false, anchorIndel), qualAsBytes(insQual, true, anchorIndel), qualAsBytes(delQual, true, anchorIndel),
|
||||
|
|
@@ -262,7 +262,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
double expectedLogL = cfg.expectedLogL(hmm);
|
||||
|
||||
// compare to our theoretical expectation with appropriate tolerance
|
||||
Assert.assertEquals(actualLogL, expectedLogL, cfg.toleranceFromTheoretical(), "Failed with hmm " + hmm);
|
||||
Assert.assertEquals(actualLogL, expectedLogL, cfg.toleranceFromTheoretical(), "Failed with hmm " + hmm + (hmm instanceof Log10PairHMM ? " (" + ((Log10PairHMM)hmm).isDoingExactLog10Calculations() + ")" : ""));
|
||||
// compare to the exact reference implementation with appropriate tolerance
|
||||
Assert.assertEquals(actualLogL, exactLogL, cfg.getTolerance(hmm), "Failed with hmm " + hmm);
|
||||
Assert.assertTrue(MathUtils.goodLog10Probability(actualLogL), "Bad log10 likelihood " + actualLogL);
|
||||
|
|
@@ -303,7 +303,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte[] mread = Arrays.copyOfRange(haplotype1,offset,haplotype1.length-offset);
|
||||
// change single base at position k to C. If it's a C, change to T
|
||||
mread[k] = ( mread[k] == (byte)'C' ? (byte)'T' : (byte)'C');
|
||||
originalHMM.initialize(mread.length, haplotype1.length);
|
||||
originalHMM.initialize(haplotype1.length, mread.length);
|
||||
double res1 = originalHMM.computeReadLikelihoodGivenHaplotypeLog10(
|
||||
haplotype1, mread,
|
||||
quals, gop, gop,
|
||||
|
|
@@ -335,7 +335,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte[] mread = Arrays.copyOfRange(haplotype1,offset,haplotype1.length);
|
||||
// change single base at position k to C. If it's a C, change to T
|
||||
mread[k] = ( mread[k] == (byte)'C' ? (byte)'T' : (byte)'C');
|
||||
originalHMM.initialize(mread.length, haplotype1.length);
|
||||
originalHMM.initialize(haplotype1.length, mread.length);
|
||||
double res1 = originalHMM.computeReadLikelihoodGivenHaplotypeLog10(
|
||||
haplotype1, mread,
|
||||
quals, gop, gop,
|
||||
|
|
@@ -372,7 +372,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte insQual = 37;
|
||||
byte delQual = 37;
|
||||
byte gcp = 10;
|
||||
hmm.initialize(readBases.length, refBases.length);
|
||||
hmm.initialize(refBases.length, readBases.length);
|
||||
double d = hmm.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
Utils.dupBytes(baseQual, readBases.length),
|
||||
Utils.dupBytes(insQual, readBases.length),
|
||||
|
|
@@ -389,7 +389,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte insQual = 100;
|
||||
byte delQual = 100;
|
||||
byte gcp = 100;
|
||||
hmm.initialize(readBases.length, refBases.length);
|
||||
hmm.initialize(refBases.length, readBases.length);
|
||||
double d = hmm.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
Utils.dupBytes(baseQual, readBases.length),
|
||||
Utils.dupBytes(insQual, readBases.length),
|
||||
|
|
@@ -429,7 +429,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte insQual = 40;
|
||||
byte delQual = 40;
|
||||
byte gcp = 10;
|
||||
hmm.initialize(readBases.length, refBases.length);
|
||||
hmm.initialize(refBases.length, readBases.length);
|
||||
double d = hmm.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
Utils.dupBytes(baseQual, readBases.length),
|
||||
Utils.dupBytes(insQual, readBases.length),
|
||||
|
|
@@ -447,7 +447,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte delQual = 40;
|
||||
byte gcp = 10;
|
||||
|
||||
exactHMM.initialize(readBases.length, refBases.length);
|
||||
exactHMM.initialize(refBases.length, readBases.length);
|
||||
double d = exactHMM.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
Utils.dupBytes(baseQual, readBases.length),
|
||||
Utils.dupBytes(insQual, readBases.length),
|
||||
|
|
@@ -455,7 +455,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
Utils.dupBytes(gcp, readBases.length), 0, true);
|
||||
//exactHMM.dumpMatrices();
|
||||
|
||||
loglessHMM.initialize(readBases.length, refBases.length);
|
||||
loglessHMM.initialize(refBases.length, readBases.length);
|
||||
double logless = loglessHMM.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
Utils.dupBytes(baseQual, readBases.length),
|
||||
Utils.dupBytes(insQual, readBases.length),
|
||||
|
|
@@ -489,7 +489,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
final int maxHaplotypeLength = refBases.length + nExtraMaxSize;
|
||||
final int maxReadLength = readBases.length + nExtraMaxSize;
|
||||
|
||||
hmm.initialize(maxReadLength, maxHaplotypeLength);
|
||||
hmm.initialize(maxHaplotypeLength, maxReadLength);
|
||||
double d = hmm.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
quals,
|
||||
insQual,
|
||||
|
|
@@ -535,7 +535,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
final int maxHaplotypeLength = prefix.length() + root1.length();
|
||||
|
||||
// the initialization occurs once, at the start of the evalution of reads
|
||||
hmm.initialize(maxReadLength, maxHaplotypeLength);
|
||||
hmm.initialize(maxHaplotypeLength, maxReadLength);
|
||||
|
||||
for ( int prefixStart = prefix.length(); prefixStart >= 0; prefixStart-- ) {
|
||||
final String myPrefix = prefix.substring(prefixStart, prefix.length());
|
||||
|
|
@@ -633,7 +633,7 @@ public class PairHMMUnitTest extends BaseTest {
|
|||
byte[] refBases = "AAAT".getBytes();
|
||||
byte[] baseQuals = Utils.dupBytes((byte)30, readBases.length);
|
||||
|
||||
hmm.initialize(2, 3);
|
||||
hmm.initialize(3, 2);
|
||||
double d = hmm.computeReadLikelihoodGivenHaplotypeLog10( refBases, readBases,
|
||||
baseQuals, baseQuals, baseQuals, baseQuals, 0, true);
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -64,8 +64,8 @@ public class Log10PairHMM extends PairHMM {
|
|||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public void initialize( final int readMaxLength, final int haplotypeMaxLength) {
|
||||
super.initialize(readMaxLength, haplotypeMaxLength);
|
||||
public void initialize( final int haplotypeMaxLength, final int readMaxLength) {
|
||||
super.initialize(haplotypeMaxLength, readMaxLength);
|
||||
|
||||
for( int iii=0; iii < X_METRIC_MAX_LENGTH; iii++ ) {
|
||||
Arrays.fill(matchMetricArray[iii], Double.NEGATIVE_INFINITY);
|
||||
|
|
|
|||
|
|
@@ -61,10 +61,10 @@ public abstract class PairHMM {
|
|||
|
||||
/**
|
||||
* Initialize this PairHMM, making it suitable to run against a read and haplotype with given lengths
|
||||
* @param readMaxLength the max length of reads we want to use with this PairHMM
|
||||
* @param haplotypeMaxLength the max length of haplotypes we want to use with this PairHMM
|
||||
* @param readMaxLength the max length of reads we want to use with this PairHMM
|
||||
*/
|
||||
public void initialize( final int readMaxLength, final int haplotypeMaxLength ) {
|
||||
public void initialize( final int haplotypeMaxLength, final int readMaxLength ) {
|
||||
if ( readMaxLength <= 0 ) throw new IllegalArgumentException("READ_MAX_LENGTH must be > 0 but got " + readMaxLength);
|
||||
if ( haplotypeMaxLength <= 0 ) throw new IllegalArgumentException("HAPLOTYPE_MAX_LENGTH must be > 0 but got " + haplotypeMaxLength);
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue