Added command-line flag to disable FPGA
Completed integration with FPGA driver
This commit is contained in:
parent
195f0c3e98
commit
de2a2a4cc7
|
|
@ -315,6 +315,10 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
|
|||
@Argument(fullName="allowCyclesInKmerGraphToGeneratePaths", shortName="allowCyclesInKmerGraphToGeneratePaths", doc="If specified, we will allow cycles in the kmer graphs to generate paths with multiple copies of the path sequenece rather than just the shortest paths", required = false)
|
||||
protected boolean allowCyclesInKmerGraphToGeneratePaths = false;
|
||||
|
||||
@Hidden
|
||||
@Argument(fullName="noFpga", shortName="noFpga", doc="If provided, disables the use of the FPGA HMM implementation", required = false)
|
||||
protected boolean noFpga = false;
|
||||
|
||||
|
||||
// the UG engines
|
||||
private UnifiedGenotyperEngine UG_engine = null;
|
||||
|
|
@ -435,7 +439,7 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
|
|||
if ( graphWriter != null ) assemblyEngine.setGraphWriter(graphWriter);
|
||||
if ( useLowQualityBasesForAssembly ) assemblyEngine.setMinBaseQualityToUseInAssembly((byte)1);
|
||||
|
||||
likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM );
|
||||
likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM, noFpga );
|
||||
|
||||
final MergeVariantsAcrossHaplotypes variantMerger = mergeVariantsViaLD ? new LDMerger(DEBUG, 10, 1) : new MergeVariantsAcrossHaplotypes();
|
||||
|
||||
|
|
|
|||
|
|
@ -80,7 +80,11 @@ public class LikelihoodCalculationEngine {
|
|||
*/
|
||||
private final double EXPECTED_ERROR_RATE_PER_BASE = 0.02;
|
||||
|
||||
public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType ) {
|
||||
public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType) {
|
||||
this(constantGCP, debug, hmmType, false);
|
||||
}
|
||||
|
||||
public LikelihoodCalculationEngine( final byte constantGCP, final boolean debug, final PairHMM.HMM_IMPLEMENTATION hmmType, final boolean noFpga) {
|
||||
|
||||
switch (hmmType) {
|
||||
case EXACT:
|
||||
|
|
@ -90,7 +94,7 @@ public class LikelihoodCalculationEngine {
|
|||
pairHMM = new Log10PairHMM(false);
|
||||
break;
|
||||
case LOGLESS_CACHING:
|
||||
if (CnyPairHMM.isAvailable())
|
||||
if (!noFpga && CnyPairHMM.isAvailable())
|
||||
pairHMM = new CnyPairHMM();
|
||||
else
|
||||
pairHMM = new LoglessPairHMM();
|
||||
|
|
@ -191,16 +195,17 @@ public class LikelihoodCalculationEngine {
|
|||
final boolean isFirstHaplotype = jjj == 0;
|
||||
final double log10l = pairHMM.computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(),
|
||||
read.getReadBases(), readQuals, readInsQuals, readDelQuals, overallGCP, isFirstHaplotype);
|
||||
|
||||
System.err.println(Integer.toString(jjj) + ": " + Double.toString(log10l));
|
||||
perReadAlleleLikelihoodMap.add(read, alleleVersions.get(haplotype), log10l);
|
||||
}
|
||||
}
|
||||
}
|
||||
if ( batchPairHMM != null ) {
|
||||
for( final GATKSAMRecord read : batchedReads ) {
|
||||
final double[] likelihoods = batchPairHMM.batchResult();
|
||||
final double[] likelihoods = batchPairHMM.batchGetResult();
|
||||
for( int jjj = 0; jjj < numHaplotypes; jjj++ ) {
|
||||
final Haplotype haplotype = haplotypes.get(jjj);
|
||||
System.err.println(Integer.toString(jjj) + ": " + Double.toString(likelihoods[jjj]));
|
||||
perReadAlleleLikelihoodMap.add(read, alleleVersions.get(haplotype), likelihoods[jjj]);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,33 +1,91 @@
|
|||
package org.broadinstitute.sting.utils.pairhmm;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.*;
|
||||
import java.lang.reflect.*;
|
||||
|
||||
import org.broadinstitute.sting.utils.haplotype.Haplotype;
|
||||
|
||||
public final class CnyPairHMM extends PairHMM implements BatchPairHMM {
|
||||
private static class HmmInput {
|
||||
public List<Haplotype> haplotypes;
|
||||
public byte[] readBases;
|
||||
public byte[] readQuals;
|
||||
public byte[] insertionGOP;
|
||||
public byte[] deletionGOP;
|
||||
public byte[] overallGCP;
|
||||
public List<Haplotype> haplotypes;
|
||||
};
|
||||
|
||||
private static class ResultQueue {
|
||||
private int offset;
|
||||
private List<double[]> batchResults;
|
||||
|
||||
public ResultQueue() {
|
||||
batchResults = new LinkedList<double[]>();
|
||||
offset = 0;
|
||||
}
|
||||
|
||||
public void push(double[] results) {
|
||||
batchResults.add(results);
|
||||
}
|
||||
|
||||
public double pop() {
|
||||
double[] results = batchResults.get(0);
|
||||
double top = results[offset++];
|
||||
if (offset == results.length) {
|
||||
batchResults.remove(0);
|
||||
offset = 0;
|
||||
}
|
||||
return top;
|
||||
}
|
||||
}
|
||||
|
||||
final static String libPath = "/opt/convey/personalities/32100.1.1.1.0";
|
||||
final static String libName = "gmvhdl_gatk_hmm";
|
||||
|
||||
private static boolean loaded = false;
|
||||
private List<HmmInput> pending = new LinkedList<HmmInput>();
|
||||
private List<HmmInput> batchRequests = new LinkedList<HmmInput>();
|
||||
private ResultQueue resultQueue = new ResultQueue();
|
||||
|
||||
static public boolean isAvailable() {
|
||||
return false;
|
||||
if (!loaded) {
|
||||
File library = new File(libPath + "/lib" + libName + ".so");
|
||||
return library.exists();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private native void initFpga();
|
||||
private native int dequeueRequirement(int reflen, int readlen);
|
||||
private native int enqueue(byte[] haplotypeBases,
|
||||
byte[] readBases,
|
||||
byte[] readQuals,
|
||||
byte[] insertionGOP,
|
||||
byte[] deletionGOP,
|
||||
byte[] overallGCP,
|
||||
int hapStartIndex,
|
||||
boolean recacheReadValues);
|
||||
private native int flushQueue();
|
||||
private native int dequeue(double[] results);
|
||||
private native double softHmm(byte[] haplotypeBases,
|
||||
byte[] readBases,
|
||||
byte[] readQuals,
|
||||
byte[] insertionGOP,
|
||||
byte[] deletionGOP,
|
||||
byte[] overallGCP,
|
||||
int hapStartIndex,
|
||||
boolean recacheReadValues);
|
||||
|
||||
public native void reportStats();
|
||||
|
||||
public void initialize( final int READ_MAX_LENGTH, final int HAPLOTYPE_MAX_LENGTH ) {
|
||||
if (!loaded) {
|
||||
// System.loadLibrary("gmvhdl_gatk_hmm");
|
||||
// initFpga();
|
||||
addLibraryPath(libPath);
|
||||
System.loadLibrary(libName);
|
||||
initFpga();
|
||||
loaded = true;
|
||||
System.out.println("FPGA HMM Initialized");
|
||||
}
|
||||
System.out.println("FPGA HMM Initialized");
|
||||
}
|
||||
|
||||
public void batchAdd(final List<Haplotype> haplotypes,
|
||||
|
|
@ -36,21 +94,44 @@ public final class CnyPairHMM extends PairHMM implements BatchPairHMM {
|
|||
final byte[] insertionGOP,
|
||||
final byte[] deletionGOP,
|
||||
final byte[] overallGCP) {
|
||||
HmmInput test=new HmmInput();
|
||||
test.haplotypes=haplotypes;
|
||||
test.readBases=readBases;
|
||||
test.readQuals=readQuals;
|
||||
test.insertionGOP=insertionGOP;
|
||||
test.deletionGOP=deletionGOP;
|
||||
test.overallGCP=overallGCP;
|
||||
pending.add(test);
|
||||
final int numHaplotypes = haplotypes.size();
|
||||
HmmInput test = new HmmInput();
|
||||
test.readBases = readBases;
|
||||
test.readQuals = readQuals;
|
||||
test.insertionGOP = insertionGOP;
|
||||
test.deletionGOP = deletionGOP;
|
||||
test.overallGCP = overallGCP;
|
||||
test.haplotypes = haplotypes;
|
||||
batchRequests.add(test);
|
||||
for (int jjj = 0; jjj < numHaplotypes; jjj++) {
|
||||
final boolean recacheReadValues = (jjj == 0);
|
||||
final Haplotype haplotype = haplotypes.get(jjj);
|
||||
enqueuePrepare(haplotype.getBases(), readBases);
|
||||
if (enqueue(haplotype.getBases(), readBases, readQuals, insertionGOP, deletionGOP, overallGCP, 0, recacheReadValues) == 0)
|
||||
throw new RuntimeException("FPGA queue overflow in batchAdd");
|
||||
}
|
||||
}
|
||||
|
||||
public double[] batchResult() {
|
||||
HmmInput test=pending.remove(0);
|
||||
double[] results=new double[test.haplotypes.size()];
|
||||
for (int i=0; i<results.length; i++) {
|
||||
results[i]=0.0;
|
||||
public double[] batchGetResult() {
|
||||
double[] results;
|
||||
|
||||
int n = flushQueue();
|
||||
if (n > 0) {
|
||||
results = new double[n];
|
||||
if (dequeue(results) != n)
|
||||
System.out.println("queue underflow in enqueuePrepare");
|
||||
resultQueue.push(results);
|
||||
}
|
||||
|
||||
final HmmInput test = batchRequests.remove(0);
|
||||
final int numHaplotypes = test.haplotypes.size();
|
||||
results = new double[numHaplotypes];
|
||||
for (int jjj = 0; jjj < numHaplotypes; jjj++) {
|
||||
results[jjj] = resultQueue.pop();
|
||||
if (results[jjj]<-60.0) {
|
||||
final Haplotype haplotype = test.haplotypes.get(jjj);
|
||||
results[jjj]=softHmm(haplotype.getBases(), test.readBases, test.readQuals, test.insertionGOP, test.deletionGOP, test.overallGCP, 0, true);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
|
@ -64,5 +145,48 @@ public final class CnyPairHMM extends PairHMM implements BatchPairHMM {
|
|||
final int hapStartIndex,
|
||||
final boolean recacheReadValues ) {
|
||||
return 0.0;
|
||||
}
|
||||
}
|
||||
|
||||
private void enqueuePrepare(byte[] haplotypeBases, byte[] readBases) {
|
||||
double[] results = null;
|
||||
int n = dequeueRequirement(haplotypeBases.length, readBases.length);
|
||||
if (n>0) {
|
||||
results = new double[n];
|
||||
if (dequeue(results)!=n)
|
||||
System.out.println("queue underflow in enqueuePrepare");
|
||||
} else if (n<0) {
|
||||
n = flushQueue();
|
||||
if (n > 0) {
|
||||
results = new double[n];
|
||||
if (dequeue(results) != n)
|
||||
System.out.println("queue underflow in enqueuePrepare");
|
||||
}
|
||||
}
|
||||
|
||||
if (results != null)
|
||||
resultQueue.push(results);
|
||||
}
|
||||
|
||||
public static void addLibraryPath(String pathToAdd) {
|
||||
try {
|
||||
final Field usrPathsField = ClassLoader.class.getDeclaredField("usr_paths");
|
||||
usrPathsField.setAccessible(true);
|
||||
|
||||
//get array of paths
|
||||
final String[] paths = (String[])usrPathsField.get(null);
|
||||
|
||||
//check if the path to add is already present
|
||||
for(String path : paths) {
|
||||
if(path.equals(pathToAdd)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
//add the new path
|
||||
final String[] newPaths = Arrays.copyOf(paths, paths.length + 1);
|
||||
newPaths[newPaths.length-1] = pathToAdd;
|
||||
usrPathsField.set(null, newPaths);
|
||||
} catch (Exception ex) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -12,5 +12,5 @@ public interface BatchPairHMM {
|
|||
final byte[] deletionGOP,
|
||||
final byte[] overallGCP);
|
||||
|
||||
public double[] batchResult();
|
||||
public double[] batchGetResult();
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in New Issue