Switched from LSF command line wrappers to JNA wrappers around the C API. Side effects:

- bsub command line is no longer fully printed out.
- extraBsubArgs hack is now a callback function updateJobRun.
Updated FullCallingPipelineTest to reflect latest changes to fullCallingPipeline.q.
Added a pipeline that tests the UGv2 runtimes at different bam counts and memory limits.
Updated the VariantEval (VE) packages that live in oneoffs so that they compile into the oneoffs package.
Added a hack to replace the deprecated symbol environ in Mac OS X 10.5+ which is needed by LSF7 on Mac.


git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@4816 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
kshakir 2010-12-10 04:36:06 +00:00
parent 2bf4fc94f0
commit 56433ebf6b
36 changed files with 23161 additions and 320 deletions

View File

@ -287,6 +287,7 @@
<fileset dir="${java.classes}" includes="**/utils/**/*.class"/>
<fileset dir="${java.classes}" includes="**/commandline/**/*.class"/>
<fileset dir="${java.classes}" includes="**/sting/datasources/**/*.class"/>
<fileset dir="${java.classes}" includes="**/sting/jna/**/*.class"/>
<fileset dir="${java.classes}" includes="net/sf/picard/**/*.class"/>
<manifest>
<attribute name="Premain-Class" value="org.broadinstitute.sting.utils.instrumentation.Sizeof" />

View File

@ -0,0 +1,10 @@
# Builds libenvironhack.dylib, a shim restoring the `environ` symbol that
# Mac OS X 10.5+ removed from dynamic libraries but LSF 7 still links against.
CC=gcc
CCFLAGS=-Wall -dynamiclib -arch i386 -arch x86_64

# `all` and `clean` produce no files of those names; declare them .PHONY so a
# stray file called "all" or "clean" cannot silently disable the targets.
.PHONY: all clean

# Make `all` the first (default) target; it delegates to the real build rule.
all: libenvironhack.dylib

libenvironhack.dylib: libenvironhack.c
	$(CC) $(CCFLAGS) -init _init_environ $< -o $@

clean:
	rm -f libenvironhack.dylib

View File

@ -0,0 +1,37 @@
/*
 * Copyright (c) 2010, The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
/*
 * LSF 7.0.6 on Mac OS X has an unsatisfied reference to the exported symbol
 * `environ`, which Apple removed from dynamic libraries on Mac OS X 10.5+:
 *     nm $LSF_LIBDIR/liblsf.dylib | grep environ
 * This shim library defines `environ` itself and fills it in at load time
 * via the linker's initializer hook (see `-init _init_environ` in the
 * accompanying Makefile). See "man environ" for more info, along with
 * http://lists.apple.com/archives/java-dev/2007/Dec/msg00096.html
 */
#include <crt_externs.h>

/* Replacement definition of the removed symbol; starts out NULL and is
   populated by init_environ() when this library is loaded. */
char **environ = (char **)0;

/* Load-time initializer (wired up through the Makefile's `-init
   _init_environ` flag): copies the process environment pointer obtained
   from _NSGetEnviron() into our `environ` definition. */
void init_environ(void) {
    environ = (*_NSGetEnviron());
}

Binary file not shown.

View File

@ -37,6 +37,9 @@
<dependency org="commons-logging" name="commons-logging" rev="1.1.1"/>
<dependency org="commons-io" name="commons-io" rev="2.0"/>
<!-- Dependencies for LSF library -->
<dependency org="net.java.dev.jna" name="jna" rev="3.2.7"/>
<!-- Dependencies for Queue GATK Extensions code generator living in java/src -->
<dependency org="commons-lang" name="commons-lang" rev="2.5"/>

View File

@ -198,13 +198,13 @@ public class VariantEvalWalker extends RodWalker<Integer, Integer> implements Tr
Set<String> rsIDsToExclude = null;
@Argument(shortName="aatk", fullName="aminoAcidTransitionKey", doc="required for the amino acid transition table; this is the key in the info field for the VCF for the transition", required = false)
protected String aminoAcidTransitionKey = null;
public String aminoAcidTransitionKey = null;
@Argument(shortName="aats", fullName="aminoAcidTransitionSplit", doc="required for the amino acid transition table, this is the key on which to split the info field value to get the reference and alternate amino acids", required=false)
protected String aminoAcidTransitionSplit = null;
public String aminoAcidTransitionSplit = null;
@Argument(shortName="aatUseCodons", fullName="aminoAcidsRepresentedByCodons", doc="for the amino acid table, specifiy that the transitions are represented as codon changes, and not directly amino acid names", required = false)
protected boolean aatUseCodons = false;
public boolean aatUseCodons = false;
@Argument(shortName="disI", fullName="discordantInteresting", doc="If passed, write discordant sites as interesting", required=false)
protected boolean DISCORDANT_INTERESTING = false;

View File

@ -1,5 +1,6 @@
package org.broadinstitute.sting.gatk.walkers.varianteval;
import org.apache.log4j.Logger;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
@ -45,6 +46,10 @@ public abstract class VariantEvaluator {
return veWalker;
}
protected Logger getLogger() {
return veWalker.getLogger();
}
public abstract boolean enabled();
//public boolean processedAnySites() { return processedASite; }
//protected void markSiteAsProcessed() { processedASite = true; }

View File

@ -0,0 +1,58 @@
/*
 * Copyright (c) 2010, The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

package org.broadinstitute.sting.jna.clibrary;

import com.sun.jna.Platform;

/**
 * Helpers for values and conversions that the standard C library exposes
 * inconsistently across platforms.
 */
public class JNAUtils {
    /**
     * Maximum host name length. The native headers define this in different
     * places per system; currently 256 on Mac and 64 everywhere else.
     */
    public static final int MAXHOSTNAMELEN;

    /**
     * Maximum path length.
     */
    public static final int MAXPATHLEN = 1024;

    static {
        // Mac is the only platform we handle specially; all others use 64.
        MAXHOSTNAMELEN = Platform.isMac() ? 256 : 64;
    }

    /**
     * Interprets a C-style int as a boolean.
     *
     * @param val int to interpret.
     * @return false when val is zero, true for any other value.
     */
    public static boolean toBoolean(int val) {
        return !(val == 0);
    }
}

View File

@ -0,0 +1,45 @@
/*
 * Copyright (c) 2010, The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

package org.broadinstitute.sting.jna.clibrary;

import com.sun.jna.NativeLong;
import com.sun.jna.Structure;

/**
 * Sparse JNA port of time.h — only the structures needed by callers are mapped.
 */
@SuppressWarnings("unused")
public class Time {
    /**
     * JNA mapping of the native {@code struct timeval}. NOTE(review): JNA
     * maps public fields to the native struct in declaration order, so
     * tv_sec/tv_usec must stay in this order.
     */
    public static class timeval extends Structure {
        /** Marker subclass for APIs that take {@code struct timeval *}. */
        public static class ByReference extends timeval implements Structure.ByReference {
        }
        /** Marker subclass for APIs that take {@code struct timeval} by value. */
        public static class ByValue extends timeval implements Structure.ByValue {
        }
        // seconds
        public NativeLong tv_sec;
        // microseconds
        public NativeLong tv_usec;
    }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
package org.broadinstitute.sting.gatk.walkers.varianteval;
package org.broadinstitute.sting.oneoffprojects.walkers.varianteval;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.VCFConstants;
@ -6,11 +6,12 @@ import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvaluator;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.report.tags.Analysis;
import org.broadinstitute.sting.utils.report.tags.DataPoint;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.utils.vcf.VCFUtils;
import java.util.HashMap;
import java.util.List;
@ -72,7 +73,7 @@ public class AlleleFrequencyComparison extends VariantEvaluator {
Map<String,Object> attributes = new HashMap<String,Object>();
VariantContextUtils.calculateChromosomeCounts(vc,attributes,false);
vc = VariantContext.modifyAttributes(vc,attributes);
getVEWalker().getLogger().debug(String.format("%s %s | %s %s",attributes.get("AC"),attributes.get("AF"),vc.getAttribute("AC"),vc.getAttribute("AF")));
getLogger().debug(String.format("%s %s | %s %s",attributes.get("AC"),attributes.get("AF"),vc.getAttribute("AC"),vc.getAttribute("AF")));
if ( attributes.size() == 2 && missingField(vc) ) {
throw new org.broadinstitute.sting.utils.exceptions.StingException("VariantContext should have had attributes modified but did not");
}

View File

@ -1,10 +1,12 @@
package org.broadinstitute.sting.gatk.walkers.varianteval;
package org.broadinstitute.sting.oneoffprojects.walkers.varianteval;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvaluator;
import org.broadinstitute.sting.utils.report.tags.Analysis;
import org.broadinstitute.sting.utils.report.tags.DataPoint;
import org.broadinstitute.sting.utils.report.utils.TableType;
@ -178,7 +180,7 @@ public class AminoAcidTransition extends VariantEvaluator {
first = parsedNames [0];
second = parsedNames [1];
} catch (ArrayIndexOutOfBoundsException e) {
veWalker.getLogger().warn("Error prasing variant context with value "+eval.getAttribute(infoKey));
getLogger().warn("Error parsing variant context with value "+eval.getAttribute(infoKey));
}
AminoAcid reference;
AminoAcid alternate;

View File

@ -0,0 +1,45 @@
/*
 * Copyright (c) 2010, The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

package org.broadinstitute.sting.jna.clibrary;

import com.sun.jna.Library;
import com.sun.jna.Native;
import org.broadinstitute.sting.BaseTest;
import org.testng.Assert;
import org.testng.annotations.Test;

/**
 * Smoke test that JNA can bind to and call the standard C library at all.
 */
public class JNAUtilsUnitTest extends BaseTest {
    /** Minimal JNA interface to libc, exposing just atol(). */
    public interface CLib extends Library {
        public CLib INSTANCE = (CLib) Native.loadLibrary("c", CLib.class);
        public int atol(String str);
    }

    @Test
    public void testCLibJna() {
        // If JNA binding works, libc's atol parses the string "10" to 10.
        int result = CLib.INSTANCE.atol("10");
        Assert.assertEquals(result, 10);
    }
}

View File

@ -0,0 +1,78 @@
/*
 * Copyright (c) 2010, The Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use,
 * copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following
 * conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
 * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
 * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

package org.broadinstitute.sting.jna.lsf.v7_0_6;

import org.apache.commons.io.FileUtils;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.jna.lsf.v7_0_6.LibBat.*;

import java.io.File;

/**
 * Really a unit test, but this test will only run on systems with LSF setup.
 */
public class LibBatIntegrationTest extends BaseTest {
    @Test
    public void testClusterName() {
        // A null cluster name indicates LSF is unavailable or misconfigured.
        String clusterName = LibLsf.ls_getclustername();
        System.out.println("Cluster name: " + clusterName);
        Assert.assertNotNull(clusterName);
    }

    @Test
    public void testSubmitEcho() {
        String queue = "hour";
        File outFile = new File("LibBatIntegrationTest.out");

        // lsb_init() must succeed before any other batch-library call;
        // negative return signals failure and lsb_sperror() explains why.
        Assert.assertFalse(LibBat.lsb_init("LibBatIntegrationTest") < 0, LibBat.lsb_sperror("lsb_init() failed"));

        submit req = new submit();

        // Explicitly set every resource limit to the default before submitting.
        for (int i = 0; i < LibLsf.LSF_RLIM_NLIMITS; i++)
            req.rLimits[i] = LibLsf.DEFAULT_RLIMIT;

        // For each field set on the submit struct, the matching SUB_* /
        // SUB2_* bit must also be set for LSF to honor it.
        req.projectName = "LibBatIntegrationTest";
        req.options |= LibBat.SUB_PROJECT_NAME;

        req.queue = queue;
        req.options |= LibBat.SUB_QUEUE;

        req.outFile = outFile.getPath();
        req.options |= LibBat.SUB_OUT_FILE;

        req.command = "echo \"Hello world.\"";

        // SUB2_BSUB_BLOCK: block the submission until the job finishes
        // (analogous to `bsub -K`).
        req.options2 |= LibBat.SUB2_BSUB_BLOCK;

        submitReply reply = new submitReply();
        long jobId = LibBat.lsb_submit(req, reply);

        // lsb_submit() returns a negative job id on error.
        Assert.assertFalse(jobId < 0, LibBat.lsb_sperror("Error dispatching"));
        // Wait up to 120 seconds for the job's output file to appear on disk.
        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
        Assert.assertEquals(reply.queue, req.queue, "LSF reply queue does not match requested queue.");
    }
}

View File

@ -0,0 +1,52 @@
import org.broadinstitute.sting.datasources.pipeline.Pipeline
import org.broadinstitute.sting.queue.QScript
import org.broadinstitute.sting.queue.extensions.gatk._
import org.broadinstitute.sting.utils.yaml.YamlUtils

import collection.JavaConversions._

/**
 * QScript that runs the UnifiedGenotyper over a grid of bam counts and
 * memory limits, one job per (numBams, memoryLimit) combination, to
 * measure runtimes at each point.
 */
class UGMemoryTests extends QScript {
  qscript =>

  @Argument(doc="the YAML file specifying inputs, interval lists, reference sequence, etc.", shortName="Y")
  var yamlFile: File = _

  @Input(doc="The path to the GenomeAnalysisTK.jar file.", shortName="gatk")
  var gatkJar: File = null

  @Input(doc="per-sample downsampling level",shortName="dcov",required=false)
  var downsampling_coverage = 300

  def script = {
    val pipeline = YamlUtils.load(classOf[Pipeline], qscript.yamlFile)

    // Memory limits (presumably GB — the directory name uses "g"; confirm)
    // and bam counts defining the test grid.
    val memoryLimits = List(1,2,4,6,8,10,12,16)
    val recalibratedSamples = pipeline.getSamples.map(_.getBamFiles.get("recalibrated")).toList

    // The two projects supplying input bams; bams are matched by looking
    // for the project id in each bam's absolute path.
    val squid1 = "C315"
    val squid2 = "C338"
    val numBamsList = List(10, 20, 50, 70, 100, 120, 150)
    val squid1Bams = recalibratedSamples.filter(_.getAbsolutePath.contains(squid1))
    val squid2Bams = recalibratedSamples.filter(_.getAbsolutePath.contains(squid2))

    for (memoryLimit <- memoryLimits) {
      for (numBams <- numBamsList) {
        // One output directory per grid point, e.g. "050_bams_04g".
        val dir = "%03d_bams_%02dg".format(numBams, memoryLimit)
        val snps = new UnifiedGenotyper
        snps.jobOutputFile = new File(dir, "UnifiedGenotyper.out")
        snps.out = new File(dir, "UnifiedGenotyper.vcf")
        // Draw half the requested bams from each project.
        snps.input_file = squid1Bams.take(numBams/2) ++ squid2Bams.take(numBams/2)
        snps.memoryLimit = Some(memoryLimit)
        snps.jarFile = qscript.gatkJar
        snps.reference_sequence = pipeline.getProject.getReferenceFile
        snps.intervals = List(pipeline.getProject.getIntervalList)
        snps.DBSNP = pipeline.getProject.getDbsnpFile
        snps.downsample_to_coverage = Some(qscript.downsampling_coverage)
        snps.annotation ++= List("AlleleBalance")
        snps.group :+= "Standard"
        add(snps)
      }
    }
  }
}

View File

@ -0,0 +1,12 @@
#!/bin/sh
# Launches the UGMemoryTests Queue pipeline; pass "debug" as the first
# argument to listen for a remote Java debugger on port 8555.

STING_HOME=/humgen/gsa-hpprojects/dev/kshakir/src/Sting_patches
TMP_DIR=/broad/shptmp/kshakir
JOB_QUEUE=gsa

# POSIX sh requires `=` inside [ ]; `==` is a bash extension and fails
# under strict /bin/sh implementations (e.g. dash).
if [ "$1" = "debug" ]; then
    # JDWP socket transport, non-suspending, so startup is not blocked.
    JAVA_DEBUG="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8555"
    shift
fi

# $JAVA_DEBUG is intentionally unquoted: it expands to multiple options
# (or nothing). "$@" is quoted so remaining arguments survive word
# splitting and globbing intact.
java $JAVA_DEBUG -Djava.io.tmpdir="$TMP_DIR" -jar "$STING_HOME"/dist/Queue.jar -jobPrefix QTest -S "$STING_HOME"/scala/qscript/kshakir/UGMemoryTests.scala -Y UGMemoryTests.yaml -gatk "$STING_HOME"/dist/GenomeAnalysisTK.jar -jobQueue $JOB_QUEUE "$@"

View File

@ -0,0 +1,610 @@
{
project: {
name: UGMemoryTests,
referenceFile: /seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta,
dbsnpFile: /humgen/gsa-hpprojects/GATK/data/dbsnp_129_b37.rod,
intervalList: /humgen/gsa-hpprojects/GATK/data/whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.chr1.interval_list
},
samples: [
{
id: C315_32742,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/32742/v3/32742.bam }
},
{
id: C315_28-0154,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/28-0154/v3/28-0154.bam }
},
{
id: C315_A08694,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/A08694/v1/A08694.bam }
},
{
id: C315_9218,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/9218/v3/9218.bam }
},
{
id: C315_42284,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/42284/v2/42284.bam }
},
{
id: C315_395607-59,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/395607-59/v1/395607-59.bam }
},
{
id: C315_12751,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/12751/v2/12751.bam }
},
{
id: C315_A02027,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/A02027/v5/A02027.bam }
},
{
id: C315_389822-58,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/389822-58/v1/389822-58.bam }
},
{
id: C315_15899,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/15899/v1/15899.bam }
},
{
id: C315_47661,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/47661/v3/47661.bam }
},
{
id: C315_209541-66,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/209541-66/v1/209541-66.bam }
},
{
id: C315_49535,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/49535/v2/49535.bam }
},
{
id: C315_496560-33,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/496560-33/v1/496560-33.bam }
},
{
id: C315_4039,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/4039/v2/4039.bam }
},
{
id: C315_492677-36,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/492677-36/v1/492677-36.bam }
},
{
id: C315_40716,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/40716/v2/40716.bam }
},
{
id: C315_38201,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/38201/v2/38201.bam }
},
{
id: C315_500277-48,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/500277-48/v1/500277-48.bam }
},
{
id: C315_22866,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/22866/v1/22866.bam }
},
{
id: C315_507365-44,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/507365-44/v2/507365-44.bam }
},
{
id: C315_407001-34,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/407001-34/v1/407001-34.bam }
},
{
id: C315_51248,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/51248/v1/51248.bam }
},
{
id: C315_427532-47,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/427532-47/v1/427532-47.bam }
},
{
id: C315_6767,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/6767/v2/6767.bam }
},
{
id: C315_52221,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/52221/v2/52221.bam }
},
{
id: C315_14779,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/14779/v1/14779.bam }
},
{
id: C315_19309,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/19309/v1/19309.bam }
},
{
id: C315_497395-47,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/497395-47/v1/497395-47.bam }
},
{
id: C315_50333,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/50333/v1/50333.bam }
},
{
id: C315_472444-60,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/472444-60/v2/472444-60.bam }
},
{
id: C315_548668-34,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/548668-34/v1/548668-34.bam }
},
{
id: C315_335840-68,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/335840-68/v1/335840-68.bam }
},
{
id: C315_265276-65,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/265276-65/v3/265276-65.bam }
},
{
id: C315_17480,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/17480/v1/17480.bam }
},
{
id: C315_426521-75,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/426521-75/v1/426521-75.bam }
},
{
id: C315_222034-64,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/222034-64/v1/222034-64.bam }
},
{
id: C315_pcath980626-1,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/pcath980626-1/v3/pcath980626-1.bam }
},
{
id: C315_527830-33,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/527830-33/v1/527830-33.bam }
},
{
id: C315_421826-53,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/421826-53/v1/421826-53.bam }
},
{
id: C315_217094-74,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/217094-74/v3/217094-74.bam }
},
{
id: C315_562474-57,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/562474-57/v1/562474-57.bam }
},
{
id: C315_434049-48,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/434049-48/v1/434049-48.bam }
},
{
id: C315_360268-49,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/360268-49/v2/360268-49.bam }
},
{
id: C315_528492-65,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/528492-65/v1/528492-65.bam }
},
{
id: C315_206691-53,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/206691-53/v1/206691-53.bam }
},
{
id: C315_19156,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/19156/v1/19156.bam }
},
{
id: C315_364827-70,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/364827-70/v1/364827-70.bam }
},
{
id: C315_544273-42,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/544273-42/v1/544273-42.bam }
},
{
id: C315_41645,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/41645/v1/41645.bam }
},
{
id: C315_39048,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/39048/v1/39048.bam }
},
{
id: C315_14007,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/14007/v2/14007.bam }
},
{
id: C315_395725-33,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/395725-33/v1/395725-33.bam }
},
{
id: C315_42291,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/42291/v1/42291.bam }
},
{
id: C315_31981,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/31981/v2/31981.bam }
},
{
id: C315_87A84DD1,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/87A84DD1/v2/87A84DD1.bam }
},
{
id: C315_54393,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/54393/v1/54393.bam }
},
{
id: C315_15974,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/15974/v1/15974.bam }
},
{
id: C315_543091-49,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/543091-49/v2/543091-49.bam }
},
{
id: C315_283916-44,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/283916-44/v1/283916-44.bam }
},
{
id: C315_49900,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/49900/v2/49900.bam }
},
{
id: C315_460187-33,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/460187-33/v2/460187-33.bam }
},
{
id: C315_48019,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/48019/v2/48019.bam }
},
{
id: C315_329427-69,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/329427-69/v1/329427-69.bam }
},
{
id: C315_A06518,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/A06518/v1/A06518.bam }
},
{
id: C315_35484,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/35484/v5/35484.bam }
},
{
id: C315_325920-37,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/325920-37/v1/325920-37.bam }
},
{
id: C315_25775,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/25775/v1/25775.bam }
},
{
id: C315_202228-58,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/202228-58/v1/202228-58.bam }
},
{
id: C315_542914-48,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/542914-48/v3/542914-48.bam }
},
{
id: C315_36047,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/36047/v2/36047.bam }
},
{
id: C315_232846-59,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/232846-59/v1/232846-59.bam }
},
{
id: C315_5760,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/5760/v1/5760.bam }
},
{
id: C315_348907-53,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/348907-53/v1/348907-53.bam }
},
{
id: C315_8891,
bamFiles: { recalibrated: /seq/picard_aggregation/C315/8891/v2/8891.bam }
},
{
id: C338_00164219,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00164219/v3/00164219.bam }
},
{
id: C338_00339745,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339745/v2/00339745.bam }
},
{
id: C338_00339753,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339753/v2/00339753.bam }
},
{
id: C338_00347320,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347320/v1/00347320.bam }
},
{
id: C338_00341953,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00341953/v2/00341953.bam }
},
{
id: C338_00347335,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347335/v2/00347335.bam }
},
{
id: C338_00347323,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347323/v2/00347323.bam }
},
{
id: C338_00313755,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313755/v2/00313755.bam }
},
{
id: C338_00344108,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00344108/v1/00344108.bam }
},
{
id: C338_00313306,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313306/v1/00313306.bam }
},
{
id: C338_00341959,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00341959/v3/00341959.bam }
},
{
id: C338_00344030,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00344030/v3/00344030.bam }
},
{
id: C338_00344099,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00344099/v3/00344099.bam }
},
{
id: C338_00339767,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339767/v2/00339767.bam }
},
{
id: C338_00347317,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347317/v3/00347317.bam }
},
{
id: C338_00338716,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00338716/v2/00338716.bam }
},
{
id: C338_00314085,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314085/v2/00314085.bam }
},
{
id: C338_00339707,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339707/v2/00339707.bam }
},
{
id: C338_00342149,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00342149/v2/00342149.bam }
},
{
id: C338_00339680,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339680/v3/00339680.bam }
},
{
id: C338_00314089,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314089/v1/00314089.bam }
},
{
id: C338_00347305,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347305/v3/00347305.bam }
},
{
id: C338_00347299,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347299/v2/00347299.bam }
},
{
id: C338_00314127,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314127/v4/00314127.bam }
},
{
id: C338_00314042,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314042/v1/00314042.bam }
},
{
id: C338_00313624,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313624/v2/00313624.bam }
},
{
id: C338_00347929,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347929/v2/00347929.bam }
},
{
id: C338_00340223,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00340223/v3/00340223.bam }
},
{
id: C338_00314130,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314130/v2/00314130.bam }
},
{
id: C338_00342001,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00342001/v3/00342001.bam }
},
{
id: C338_00313906,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313906/v2/00313906.bam }
},
{
id: C338_00313844,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313844/v2/00313844.bam }
},
{
id: C338_00153519,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00153519/v1/00153519.bam }
},
{
id: C338_00071493,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00071493/v2/00071493.bam }
},
{
id: C338_00314083,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314083/v2/00314083.bam }
},
{
id: C338_00334568,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00334568/v6/00334568.bam }
},
{
id: C338_00346347,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00346347/v3/00346347.bam }
},
{
id: C338_00180648,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00180648/v1/00180648.bam }
},
{
id: C338_00187275,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00187275/v1/00187275.bam }
},
{
id: C338_00346283,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00346283/v3/00346283.bam }
},
{
id: C338_00313933,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313933/v5/00313933.bam }
},
{
id: C338_00313479,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313479/v2/00313479.bam }
},
{
id: C338_00313422,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313422/v1/00313422.bam }
},
{
id: C338_00153459,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00153459/v2/00153459.bam }
},
{
id: C338_00340147,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00340147/v3/00340147.bam }
},
{
id: C338_00308255,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00308255/v3/00308255.bam }
},
{
id: C338_00341944,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00341944/v3/00341944.bam }
},
{
id: C338_00314081,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314081/v2/00314081.bam }
},
{
id: C338_00339729,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339729/v2/00339729.bam }
},
{
id: C338_00340121,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00340121/v2/00340121.bam }
},
{
id: C338_00164078,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00164078/v1/00164078.bam }
},
{
id: C338_00314037,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314037/v2/00314037.bam }
},
{
id: C338_00313708,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313708/v3/00313708.bam }
},
{
id: C338_00346266,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00346266/v3/00346266.bam }
},
{
id: C338_00313914,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313914/v1/00313914.bam }
},
{
id: C338_00340093,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00340093/v2/00340093.bam }
},
{
id: C338_00313909,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313909/v1/00313909.bam }
},
{
id: C338_00347739,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347739/v2/00347739.bam }
},
{
id: C338_00338680,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00338680/v3/00338680.bam }
},
{
id: C338_00347283,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347283/v3/00347283.bam }
},
{
id: C338_00180679,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00180679/v1/00180679.bam }
},
{
id: C338_00313247,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313247/v1/00313247.bam }
},
{
id: C338_00174844,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00174844/v2/00174844.bam }
},
{
id: C338_00313450,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313450/v1/00313450.bam }
},
{
id: C338_00313626,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313626/v2/00313626.bam }
},
{
id: C338_00313311,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313311/v2/00313311.bam }
},
{
id: C338_00313988,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313988/v2/00313988.bam }
},
{
id: C338_00314078,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00314078/v2/00314078.bam }
},
{
id: C338_00313721,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313721/v2/00313721.bam }
},
{
id: C338_00347894,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00347894/v2/00347894.bam }
},
{
id: C338_00329142,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00329142/v1/00329142.bam }
},
{
id: C338_00313304,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00313304/v1/00313304.bam }
},
{
id: C338_00334599,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00334599/v2/00334599.bam }
},
{
id: C338_00339674,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00339674/v2/00339674.bam }
},
{
id: C338_00234650,
bamFiles: { recalibrated: /seq/picard_aggregation/C338/00234650/v1/00234650.bam }
}
]
}

View File

@ -7,7 +7,7 @@ import java.io.File
/**
* Dispatches jobs to a compute cluster.
*/
trait DispatchJobRunner extends JobRunner {
trait DispatchJobRunner extends JobRunner[CommandLineFunction] {
/**
* Builds a command line that can be run to force an automount of the directories.
* @param function Function whose jobDirectories to look up.

View File

@ -9,7 +9,7 @@ import org.broadinstitute.sting.queue.function.QFunction
* execute the function in the edge.
*/
class FunctionEdge(var function: QFunction) extends QEdge {
var runner: JobRunner =_
var runner: JobRunner[_] =_
/**
* The number of times this edge has been run.

View File

@ -0,0 +1,7 @@
package org.broadinstitute.sting.queue.engine
import org.broadinstitute.sting.queue.function.InProcessFunction
/**
 * Creates InProcessRunners, which execute functions inside the Queue JVM
 * rather than dispatching them to an external process or cluster.
 */
class InProcessJobManager extends JobManager[InProcessFunction, InProcessRunner] {
  /** Creates a new in-process runner for the function. */
  override def create(function: InProcessFunction): InProcessRunner = new InProcessRunner(function)
}

View File

@ -6,7 +6,7 @@ import org.broadinstitute.sting.queue.util.Logging
/**
* Runs a function that executes in process and does not fork out an external process.
*/
class InProcessRunner(val function: InProcessFunction) extends JobRunner with Logging {
class InProcessRunner(val function: InProcessFunction) extends JobRunner[InProcessFunction] with Logging {
private var runStatus: RunnerStatus.Value = _
def start() = {
@ -19,8 +19,8 @@ class InProcessRunner(val function: InProcessFunction) extends JobRunner with Lo
function.deleteLogs()
function.deleteOutputs()
runStatus = RunnerStatus.RUNNING
function.mkOutputDirectories()
runStatus = RunnerStatus.RUNNING
function.run()
function.doneOutputs.foreach(_.createNewFile())
writeDone()

View File

@ -0,0 +1,11 @@
package org.broadinstitute.sting.queue.engine
import org.broadinstitute.sting.queue.function.QFunction
/**
 * Creates and stops JobRunners.
 * Implementations bind a function type to the runner type that executes it.
 */
trait JobManager[TFunction <: QFunction, TRunner <: JobRunner[TFunction]] {
  /** Creates a new runner that will execute the function. */
  def create(function: TFunction): TRunner
  /** Tries to stop the passed runners. The default implementation does nothing. */
  def tryStop(runners: List[JobRunner[_]]) = {}
}

View File

@ -7,7 +7,7 @@ import org.broadinstitute.sting.queue.function.QFunction
/**
* Base interface for job runners.
*/
trait JobRunner {
trait JobRunner[TFunction <: QFunction] {
/**
* Runs the function.
* After the function returns the status of the function should
@ -26,7 +26,7 @@ trait JobRunner {
/**
* Returns the function to be run.
*/
def function: QFunction
def function: TFunction
protected def writeDone() = {
val content = "%s%nDone.".format(function.description)
@ -37,6 +37,9 @@ trait JobRunner {
IOUtils.writeContents(functionErrorFile, content)
}
/**
* Writes the stack trace to the error file.
*/
protected def writeStackTrace(e: Throwable) = {
val stackTrace = new StringWriter
val printWriter = new PrintWriter(stackTrace)
@ -46,5 +49,16 @@ trait JobRunner {
IOUtils.writeContents(functionErrorFile, stackTrace.toString)
}
/**
* Calls back to a hook that an expert user can setup to modify a job.
* @param value Value to modify.
*/
protected def updateJobRun(value: Any) = {
val updater = function.updateJobRun
if (updater != null)
if (updater.isDefinedAt(value))
updater(value)
}
private def functionErrorFile = if (function.jobErrorFile != null) function.jobErrorFile else function.jobOutputFile
}

View File

@ -0,0 +1,11 @@
package org.broadinstitute.sting.queue.engine
import org.broadinstitute.sting.queue.function.CommandLineFunction
/**
 * Creates and stops Lsf706JobRunners, which submit jobs to LSF 7.0.6
 * through the JNA wrappers around the LSF C API.
 */
class Lsf706JobManager extends JobManager[CommandLineFunction, Lsf706JobRunner] {
  /** Creates a new LSF runner for the function. */
  override def create(function: CommandLineFunction): Lsf706JobRunner = new Lsf706JobRunner(function)

  /** Delegates to the companion object, which bulk-kills the submitted LSF jobs. */
  override def tryStop(runners: List[JobRunner[_]]): Unit = Lsf706JobRunner.tryStop(runners)
}

View File

@ -0,0 +1,158 @@
package org.broadinstitute.sting.queue.engine
import java.io.File
import com.sun.jna.Memory
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.util._
import org.broadinstitute.sting.queue.QException
import org.broadinstitute.sting.jna.lsf.v7_0_6.{LibLsf, LibBat}
import org.broadinstitute.sting.jna.lsf.v7_0_6.LibBat.{signalBulkJobs, submitReply, submit}
/**
 * Runs jobs on an LSF 7.0.6 compute cluster via the JNA wrappers around the
 * LSF C API (lsb_submit) instead of shelling out to the bsub command line.
 */
class Lsf706JobRunner(function: CommandLineFunction) extends LsfJobRunner(function) with Logging {

  // Referencing the companion object forces its initializer to run, so
  // lsb_init() has been called before the first lsb_submit().
  Lsf706JobRunner

  /**
   * Dispatches the function on the LSF cluster.
   * Builds a native submit request mirroring the old bsub arguments, lets an
   * expert user tweak it via the updateJobRun callback, then submits it.
   * @param function Command to run.
   */
  def start() = {
    try {
      val request = new submit

      // Start every resource limit slot at LSF's default before applying options.
      for (i <- 0 until LibLsf.LSF_RLIM_NLIMITS)
        request.rLimits(i) = LibLsf.DEFAULT_RLIMIT;

      // stdout is always captured (equivalent of bsub -o).
      request.outFile = function.jobOutputFile.getPath
      request.options |= LibBat.SUB_OUT_FILE

      // stderr only when a separate error file was requested (bsub -e).
      if (function.jobErrorFile != null) {
        request.errFile = function.jobErrorFile.getPath
        request.options |= LibBat.SUB_ERR_FILE
      }

      // LSF accounting project (bsub -P).
      if (function.jobProject != null) {
        request.projectName = function.jobProject
        request.options |= LibBat.SUB_PROJECT_NAME
      }

      // Target queue (bsub -q).
      if (function.jobQueue != null) {
        request.queue = function.jobQueue
        request.options |= LibBat.SUB_QUEUE
      }

      // Only set the working directory when it differs from the current one.
      if (IOUtils.absolute(new File(".")) != function.commandDirectory) {
        request.cwd = function.commandDirectory.getPath
        request.options3 |= LibBat.SUB3_CWD
      }

      // Mark the job rerunnable (bsub -r).
      if (function.jobRestartable) {
        request.options |= LibBat.SUB_RERUNNABLE
      }

      // Memory reservation resource requirement (bsub -R "rusage[mem=...]").
      if (function.memoryLimit.isDefined) {
        request.resReq = "rusage[mem=" + function.memoryLimit.get + "]"
        request.options |= LibBat.SUB_RES_REQ
      }

      // Job name, truncated to keep it within LSF's length limits (bsub -J).
      if (function.description != null) {
        request.jobName = function.description.take(1000)
        request.options |= LibBat.SUB_JOB_NAME
      }

      // The actual command plus generated pre/post-exec scripts; the post-exec
      // creates the status files that Queue later polls for completion.
      exec = writeExec()
      request.command = "sh " + exec

      preExec = writePreExec()
      request.preExecCmd = "sh " + preExec
      request.options |= LibBat.SUB_PRE_EXEC

      postExec = writePostExec()
      request.postExecCmd = "sh " + postExec
      request.options3 |= LibBat.SUB3_POST_EXEC

      // Allow advanced users to update the request.
      updateJobRun(request)

      if (logger.isDebugEnabled) {
        logger.debug("Starting: " + function.commandDirectory + " > " + bsubCommand)
      } else {
        logger.info("Starting: " + bsubCommand)
      }

      // Clear any state left over from a previous attempt before submitting.
      function.deleteLogs()
      function.deleteOutputs()
      function.mkOutputDirectories()

      runStatus = RunnerStatus.RUNNING

      // Retry the native submission before giving up; lsb_submit returns a
      // negative job id on failure. NOTE(review): assumes (1, 5, 10) are
      // Retry.attempt's retry/wait parameters — confirm against Retry.
      Retry.attempt(() => {
        val reply = new submitReply
        jobId = LibBat.lsb_submit(request, reply)
        if (jobId < 0)
          throw new QException(LibBat.lsb_sperror("Unable to submit job"))
      }, 1, 5, 10)

      // Path used later to locate the .done/.fail status files for this job.
      jobStatusPath = IOUtils.absolute(new File(jobStatusDir, "." + jobId)).toString
      logger.info("Submitted LSF job id: " + jobId)
    } catch {
      case e =>
        // Submission failed: mark the run failed, clean up, record the error.
        runStatus = RunnerStatus.FAILED
        try {
          removeTemporaryFiles()
          function.failOutputs.foreach(_.createNewFile())
          writeStackTrace(e)
        } catch {
          case _ => /* ignore errors in the exception handler */
        }
        logger.error("Error: " + bsubCommand, e)
    }
  }
}
object Lsf706JobRunner extends Logging {

  // Initialize the LSF batch library when the object is first referenced.
  init()

  /**
   * Initialize the Lsf library.
   * @throws QException if lsb_init() reports an error.
   */
  private def init() = {
    if (LibBat.lsb_init("Queue") < 0)
      throw new QException(LibBat.lsb_sperror("lsb_init() failed"))
  }

  /**
   * Tries to stop any running jobs.
   * Bulk-kills the submitted LSF jobs in groups, then removes each runner's
   * temporary files. Errors are logged but not rethrown so shutdown continues.
   * @param runners Runners to stop.
   */
  def tryStop(runners: List[JobRunner[_]]) = {
    val lsfJobRunners = runners.filter(_.isInstanceOf[Lsf706JobRunner]).map(_.asInstanceOf[Lsf706JobRunner])
    if (lsfJobRunners.size > 0) {
      // Skip runners that never got a job id; kill the rest in batches of 10.
      for (jobRunners <- lsfJobRunners.filterNot(_.jobId < 0).grouped(10)) {
        try {
          val njobs = jobRunners.size
          val signalJobs = new signalBulkJobs
          // Native array of job ids, written 8 bytes per id (64-bit longs).
          signalJobs.jobs = {
            val p = new Memory(8 * njobs)
            p.write(0, jobRunners.map(_.jobId).toArray, 0, njobs)
            p
          }
          signalJobs.njobs = njobs
          signalJobs.signal = 9 // signal 9 = SIGKILL
          if (LibBat.lsb_killbulkjobs(signalJobs) < 0)
            throw new QException(LibBat.lsb_sperror("lsb_killbulkjobs failed"))
        } catch {
          case e =>
            logger.error("Unable to kill all jobs.", e)
        }
        try {
          jobRunners.foreach(_.removeTemporaryFiles())
        } catch {
          case e => /* ignore */
        }
      }
    }
  }
}

View File

@ -7,16 +7,16 @@ import org.broadinstitute.sting.queue.util._
/**
* Runs jobs on an LSF compute cluster.
*/
class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner with Logging {
private var runStatus: RunnerStatus.Value = _
abstract class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner with Logging {
protected var runStatus: RunnerStatus.Value = _
var job: LsfJob = new LsfJob
var jobId = -1L
/** Which directory to use for the job status files. */
private def jobStatusDir = function.jobTempDir
protected def jobStatusDir = function.jobTempDir
/** A file to look for to validate that the function ran to completion. */
private var jobStatusPath: String = _
protected var jobStatusPath: String = _
/** A temporary job done file to let Queue know that the process ran successfully. */
private lazy val jobDoneFile = new File(jobStatusPath + ".done")
@ -25,76 +25,16 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
private lazy val jobFailFile = new File(jobStatusPath + ".fail")
/** A generated exec shell script. */
private var exec: File = _
protected var exec: File = _
/** A generated pre-exec shell script. */
private var preExec: File = _
protected var preExec: File = _
/** A generated post-exec shell script. */
private var postExec: File = _
protected var postExec: File = _
/**
* Dispatches the function on the LSF cluster.
* @param function Command to run.
*/
def start() = {
try {
function.mkOutputDirectories()
// job.name = function.jobName TODO: Make setting the job name optional.
job.outputFile = function.jobOutputFile
job.errorFile = function.jobErrorFile
job.project = function.jobProject
job.queue = function.jobQueue
if (IOUtils.absolute(new File(".")) != function.commandDirectory)
job.workingDir = function.commandDirectory
job.extraBsubArgs ++= function.extraArgs
if (function.jobRestartable)
job.extraBsubArgs :+= "-r"
if (function.memoryLimit.isDefined)
job.extraBsubArgs ++= List("-R", "rusage[mem=" + function.memoryLimit.get + "]")
job.name = function.commandLine.take(1000)
exec = writeExec()
job.command = "sh " + exec
preExec = writePreExec()
job.preExecCommand = "sh " + preExec
postExec = writePostExec()
job.postExecCommand = "sh " + postExec
if (logger.isDebugEnabled) {
logger.debug("Starting: " + function.commandDirectory + " > " + job.bsubCommand.mkString(" "))
} else {
logger.info("Starting: " + job.bsubCommand.mkString(" "))
}
function.deleteLogs()
function.deleteOutputs()
runStatus = RunnerStatus.RUNNING
Retry.attempt(() => job.run(), 1, 5, 10)
jobStatusPath = IOUtils.absolute(new File(jobStatusDir, "." + job.bsubJobId)).toString
logger.info("Submitted LSF job id: " + job.bsubJobId)
} catch {
case e =>
runStatus = RunnerStatus.FAILED
try {
removeTemporaryFiles()
function.failOutputs.foreach(_.createNewFile())
writeStackTrace(e)
} catch {
case _ => /* ignore errors in the exception handler */
}
logger.error("Error: " + job.bsubCommand.mkString(" "), e)
}
}
// TODO: Full bsub command for debugging.
protected def bsubCommand = "bsub " + function.commandLine
/**
* Updates and returns the status by looking for job status files.
@ -114,12 +54,12 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
if (jobFailFile.exists) {
removeTemporaryFiles()
runStatus = RunnerStatus.FAILED
logger.info("Error: " + job.bsubCommand.mkString(" "))
logger.info("Error: " + bsubCommand)
tailError()
} else if (jobDoneFile.exists) {
removeTemporaryFiles()
runStatus = RunnerStatus.DONE
logger.info("Done: " + job.bsubCommand.mkString(" "))
logger.info("Done: " + bsubCommand)
}
} catch {
case e =>
@ -131,7 +71,7 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
} catch {
case _ => /* ignore errors in the exception handler */
}
logger.error("Error: " + job.bsubCommand.mkString(" "), e)
logger.error("Error: " + bsubCommand, e)
}
runStatus
@ -151,8 +91,8 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
/**
* Outputs the last lines of the error logs.
*/
private def tailError() = {
val errorFile = if (job.errorFile != null) job.errorFile else job.outputFile
protected def tailError() = {
val errorFile = if (function.jobErrorFile != null) function.jobErrorFile else function.jobOutputFile
if (IOUtils.waitFor(errorFile, 120)) {
val tailLines = IOUtils.tail(errorFile, 100)
val nl = "%n".format()
@ -167,7 +107,7 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
* optionally mount any automount directories on the node.
* @return the file path to the exec script.
*/
private def writeExec() = {
protected def writeExec() = {
IOUtils.writeTempFile(function.commandLine, ".exec", "", jobStatusDir)
}
@ -176,7 +116,7 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
* optionally mount any automount directories on the node.
* @return the file path to the pre-exec.
*/
private def writePreExec() = {
protected def writePreExec() = {
val preExec = new StringBuilder
preExec.append("rm -f '%s/'.$LSB_JOBID.done%n".format(jobStatusDir))
@ -194,7 +134,7 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
* Writes a post-exec file to create the status files.
* @return the file path to the post-exec.
*/
private def writePostExec() = {
protected def writePostExec() = {
val postExec = new StringBuilder
val touchDone = function.doneOutputs.map("touch '%s'%n".format(_)).mkString

View File

@ -26,6 +26,9 @@ class QGraph extends Logging {
private var shuttingDown = false
private val nl = "%n".format()
private val inProcessManager = new InProcessJobManager
private var commandLineManager: JobManager[CommandLineFunction, _<:JobRunner[CommandLineFunction]] = _
/**
* Adds a QScript created CommandLineFunction to the graph.
* @param command Function to add to the graph.
@ -261,6 +264,11 @@ class QGraph extends Logging {
*/
private def runJobs() = {
try {
if (settings.bsubAllJobs)
commandLineManager = new Lsf706JobManager
else
commandLineManager = new ShellJobManager
if (settings.startFromScratch) {
logger.info("Removing outputs from previous runs.")
foreachFunction(_.resetToPending(true))
@ -360,12 +368,9 @@ class QGraph extends Logging {
private def newRunner(f: QFunction) = {
f match {
case cmd: CommandLineFunction =>
if (settings.bsubAllJobs)
new LsfJobRunner(cmd)
else
new ShellJobRunner(cmd)
commandLineManager.create(cmd)
case inProc: InProcessFunction =>
new InProcessRunner(inProc)
inProcessManager.create(inProc)
case _ =>
throw new QException("Unexpected function: " + f)
}
@ -376,7 +381,7 @@ class QGraph extends Logging {
val emailMessage = new EmailMessage
emailMessage.from = settings.statusEmailFrom
emailMessage.to = settings.statusEmailTo
emailMessage.subject = "Queue function: Failure"
emailMessage.subject = "Queue function: Failure: " + settings.qSettings.jobNamePrefix
addFailedFunctions(emailMessage, failed)
emailMessage.trySend(settings.qSettings.emailSettings)
}
@ -412,9 +417,9 @@ class QGraph extends Logging {
emailMessage.to = settings.statusEmailTo
emailMessage.body = getStatus + nl
if (failed.size == 0) {
emailMessage.subject = "Queue run: Success"
emailMessage.subject = "Queue run: Success: " + settings.qSettings.jobNamePrefix
} else {
emailMessage.subject = "Queue run: Failure"
emailMessage.subject = "Queue run: Failure: " + settings.qSettings.jobNamePrefix
addFailedFunctions(emailMessage, failed)
}
emailMessage.trySend(settings.qSettings.emailSettings)
@ -754,25 +759,7 @@ class QGraph extends Logging {
*/
def shutdown() {
shuttingDown = true
val lsfJobRunners = getRunningJobs.filter(_.runner.isInstanceOf[LsfJobRunner]).map(_.runner.asInstanceOf[LsfJobRunner])
if (lsfJobRunners.size > 0) {
for (jobRunners <- lsfJobRunners.filterNot(_.job.bsubJobId == null).grouped(10)) {
try {
val bkill = new LsfKillJob(jobRunners.map(_.job))
logger.info(bkill.command)
bkill.run()
} catch {
case jee: JobExitException =>
logger.error("Unable to kill all jobs:%n%s".format(jee.getMessage))
case e =>
logger.error("Unable to kill jobs.", e)
}
try {
jobRunners.foreach(_.removeTemporaryFiles())
} catch {
case e => /* ignore */
}
}
}
if (commandLineManager != null)
commandLineManager.tryStop(getRunningJobs.map(_.runner))
}
}

View File

@ -0,0 +1,7 @@
package org.broadinstitute.sting.queue.engine
import org.broadinstitute.sting.queue.function.CommandLineFunction
/**
 * Creates ShellJobRunners, which execute command line functions in a local shell.
 */
class ShellJobManager extends JobManager[CommandLineFunction, ShellJobRunner] {
  /** Creates a new local shell runner for the function. */
  override def create(function: CommandLineFunction): ShellJobRunner = new ShellJobRunner(function)
}

View File

@ -6,7 +6,7 @@ import org.broadinstitute.sting.queue.util.{JobExitException, Logging, ShellJob}
/**
* Runs jobs one at a time locally
*/
class ShellJobRunner(val function: CommandLineFunction) extends JobRunner with Logging {
class ShellJobRunner(val function: CommandLineFunction) extends JobRunner[CommandLineFunction] with Logging {
private var runStatus: RunnerStatus.Value = _
/**
@ -21,6 +21,9 @@ class ShellJobRunner(val function: CommandLineFunction) extends JobRunner with L
job.outputFile = function.jobOutputFile
job.errorFile = function.jobErrorFile
// Allow advanced users to update the job.
updateJobRun(job)
if (logger.isDebugEnabled) {
logger.debug("Starting: " + function.commandDirectory + " > " + function.commandLine)
} else {
@ -33,8 +36,8 @@ class ShellJobRunner(val function: CommandLineFunction) extends JobRunner with L
function.deleteLogs()
function.deleteOutputs()
runStatus = RunnerStatus.RUNNING
function.mkOutputDirectories()
runStatus = RunnerStatus.RUNNING
job.run()
function.doneOutputs.foreach(_.createNewFile())
runStatus = RunnerStatus.DONE

View File

@ -23,9 +23,6 @@ trait CommandLineFunction extends QFunction with Logging {
/** Job queue to run the command */
var jobQueue: String = _
/** Extra arguments to specify on the command line */
var extraArgs: List[String] = Nil
/**
* Returns set of directories required to run the command.
* @return Set of directories required to run the command.

View File

@ -37,6 +37,12 @@ trait QFunction extends Logging {
/** Order the function was added to the graph. */
var addOrder: List[Int] = Nil
/**
* A callback for modifying the run.
* NOTE: This function is for ADVANCED use only and is unsupported.
*/
var updateJobRun: PartialFunction[Any,Unit] = null
/** File to redirect any output. Defaults to <jobName>.out */
@Output(doc="File to redirect any output", required=false)
@Gather(classOf[SimpleTextGatherFunction])

View File

@ -1,135 +0,0 @@
package org.broadinstitute.sting.queue.util
import java.util.regex.Pattern
import collection.JavaConversions._
/**
* A job submitted to LSF. This class is designed to work somewhat like
* java.lang.Process, but has some extensions.
*
* @author A subset of the original BroadCore ported to scala by Khalid Shakir
*/
class LsfJob extends CommandLineJob with Logging {
var name: String = _
var project: String = _
var queue: String = _
var preExecCommand: String = _
var postExecCommand: String = _
var waitForCompletion = false
var extraBsubArgs: List[String] = Nil
var bsubJobId: String = _
/**
* Starts the job. Command must exist. The job will be submitted to LSF.
*/
def run() = {
assert(bsubJobId == null, "LSF job was already submitted")
assert(command != null, "Command was not set on LSF job")
assert(outputFile != null, "Output file must be set on LSF job")
// capture the output for debugging
val stdinSettings = new ProcessController.InputStreamSettings(null, null)
val stdoutSettings = new ProcessController.OutputStreamSettings(FIVE_MB, null, false)
val stderrSettings = new ProcessController.OutputStreamSettings(FIVE_MB, null, false)
// launch the bsub job from the current directory
val processSettings = new ProcessController.ProcessSettings(
bsubCommand, environmentVariables, null, stdinSettings, stdoutSettings, stderrSettings, false)
val bsubOutput = processController.exec(processSettings)
if (bsubOutput.exitValue != 0) {
throw new JobExitException("Failed to submit LSF job.", bsubCommand,
bsubOutput.exitValue, content(bsubOutput.stderr))
}
// get the LSF job ID
val matcher = LsfJob.JOB_ID.matcher(bsubOutput.stdout.content)
matcher.find()
bsubJobId = matcher.group
}
/**
* Generates the bsub command line for this LsfJob.
* @return command line as a Array[String]
*/
def bsubCommand = {
var args = List.empty[String]
args :+= "bsub"
if (name != null) {
args :+= "-J"
args :+= name
}
if (inputFile != null) {
args :+= "-i"
args :+= inputFile.getAbsolutePath
}
args :+= "-o"
args :+= outputFile.getAbsolutePath
if (errorFile != null) {
args :+= "-e"
args :+= errorFile.getAbsolutePath
}
if (queue != null) {
args :+= "-q"
args :+= queue
}
if (project != null) {
args :+= "-P"
args :+= project
}
if (preExecCommand != null) {
args :+= "-E"
args :+= preExecCommand
}
if (postExecCommand != null) {
args :+= "-Ep"
args :+= postExecCommand
}
if (workingDir != null) {
args :+= "-cwd"
args :+= workingDir.getPath
}
if (waitForCompletion) {
args :+= "-K"
}
args ++= extraBsubArgs
args :+= command
args.toArray
}
/**
* Get the list of environment variables and pass into the exec job. We strip
* out LD_ASSUME_KERNEL because it behaves badly when running bsub jobs across
* different versions of the linux OS.
*
* @return array of environment vars in 'name=value' format.
*/
private def environmentVariables =
System.getenv()
.filterNot{case (name, value) => name.equalsIgnoreCase("LD_ASSUME_KERNEL") || value == null}
.toMap
}
/**
* A job submitted to LSF. This class is designed to work somewhat like
* java.lang.Process, but has some extensions.
*
* @author A subset of the original BroadCore ported to scala by Khalid Shakir
*/
object LsfJob {
/** Used to search the stdout for the job id. */
private val JOB_ID = Pattern.compile("\\d+")
}

View File

@ -1,26 +0,0 @@
package org.broadinstitute.sting.queue.util
/**
* bkills a list of lsf jobs.
*/
class LsfKillJob(jobs: Traversable[LsfJob]) extends CommandLineJob with Logging {
command = "bkill " + jobs.map(_.bsubJobId).mkString(" ")
def run() = {
// capture the output for debugging
val stdinSettings = new ProcessController.InputStreamSettings(null, null)
val stdoutSettings = new ProcessController.OutputStreamSettings(FIVE_MB, null, false)
val stderrSettings = new ProcessController.OutputStreamSettings(FIVE_MB, null, false)
val bkillCommand = (List("bkill") ++ jobs.map(_.bsubJobId)).toArray
// launch the bsub job from the current directory
val processSettings = new ProcessController.ProcessSettings(
bkillCommand, null, null, stdinSettings, stdoutSettings, stderrSettings, false)
val bkillOutput = processController.exec(processSettings)
if (bkillOutput.exitValue != 0) {
throw new JobExitException("Failed to kill LSF jobs.", bkillCommand, bkillOutput.exitValue, content(bkillOutput.stderr))
}
}
}

View File

@ -15,33 +15,41 @@ class FullCallingPipelineTest extends BaseTest {
private final val validationReportsDataLocation = "/humgen/gsa-hpprojects/GATK/validationreports/submitted/"
val k1gChr20Dataset = {
val dataset = newK1gDataset
dataset.pipeline.getProject.setName("Barcoded_1000G_WEx_chr20")
val dataset = newK1gDataset("Barcoded_1000G_WEx_chr20")
dataset.pipeline.getProject.setIntervalList(new File(BaseTest.GATKDataLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.chr20.interval_list"))
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.all.counter.nCalledLoci", "1390", "1420")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.all.titv.tiTvRatio", "3.52", "3.60")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.known.titv.tiTvRatio", "3.71", "3.80")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.novel.titv.tiTvRatio", "2.79", "2.86")
dataset.validations :+= new IntegerValidation("eval.dbsnp.all.called.all.counter.nCalledLoci", 1359)
dataset.validations :+= new IntegerValidation("eval.dbsnp.all.called.known.counter.nCalledLoci", 1134)
dataset.validations :+= new IntegerValidation("eval.dbsnp.all.called.novel.counter.nCalledLoci", 225)
dataset.validations :+= new DoubleValidation("eval.dbsnp.all.called.all.titv.tiTvRatio", 3.6701)
dataset.validations :+= new DoubleValidation("eval.dbsnp.all.called.known.titv.tiTvRatio", 3.7647)
dataset.validations :+= new DoubleValidation("eval.dbsnp.all.called.novel.titv.tiTvRatio", 3.2453)
dataset.jobQueue = "hour"
dataset
}
val k1gExomeDataset = {
val dataset = newK1gDataset
dataset.pipeline.getProject.setName("Barcoded_1000G_WEx")
val dataset = newK1gDataset("Barcoded_1000G_WEx")
dataset.pipeline.getProject.setIntervalList(new File(BaseTest.GATKDataLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list"))
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.all.counter.nCalledLoci", "51969", "53019")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.all.titv.tiTvRatio", "3.18", "3.25")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.known.titv.tiTvRatio", "3.29", "3.36")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.novel.titv.tiTvRatio", "2.80", "2.87")
dataset.validations :+= new IntegerValidation("eval.dbsnp.all.called.all.counter.nCalledLoci", 51130)
dataset.validations :+= new IntegerValidation("eval.dbsnp.all.called.known.counter.nCalledLoci", 41042)
dataset.validations :+= new IntegerValidation("eval.dbsnp.all.called.novel.counter.nCalledLoci", 10088)
dataset.validations :+= new DoubleValidation("eval.dbsnp.all.called.all.titv.tiTvRatio", 3.2598)
dataset.validations :+= new DoubleValidation("eval.dbsnp.all.called.known.titv.tiTvRatio", 3.3307)
dataset.validations :+= new DoubleValidation("eval.dbsnp.all.called.novel.titv.tiTvRatio", 2.9937)
dataset.jobQueue = "gsa"
dataset.bigMemQueue = "gsa"
dataset
}
def newK1gDataset = {
def newK1gDataset(projectName: String) = {
val project = new PipelineProject
project.setName(projectName)
project.setReferenceFile(new File(BaseTest.hg19Reference))
project.setDbsnpFile(new File(BaseTest.b37dbSNP129))
@ -52,7 +60,7 @@ class FullCallingPipelineTest extends BaseTest {
var samples = List.empty[PipelineSample]
for (id <- ids) {
val sample = new PipelineSample
sample.setId(project.getName + "_" + id)
sample.setId(projectName + "_" + id)
sample.setBamFiles(Map("recalibrated" -> new File("/seq/picard_aggregation/%1$s/%2$s/v6/%2$s.bam".format(squid,id))))
sample.setTags(Map("SQUIDProject" -> squid, "CollaboratorID" -> id))
samples :+= sample
@ -66,12 +74,11 @@ class FullCallingPipelineTest extends BaseTest {
dataset.pipeline = pipeline
dataset.refseq = BaseTest.hg19Refseq
dataset.targetTiTv = "3.0"
dataset.bigMemQueue = "gsa"
dataset
}
@DataProvider(name="datasets")
@DataProvider(name="datasets")//, parallel=true)
final def convertDatasets: Array[Array[AnyRef]] =
datasets.map(dataset => Array(dataset.asInstanceOf[AnyRef])).toArray
@ -80,13 +87,19 @@ class FullCallingPipelineTest extends BaseTest {
val projectName = dataset.pipeline.getProject.getName
val testName = "fullCallingPipeline-" + projectName
val yamlFile = writeTempYaml(dataset.pipeline)
var cleanType = "cleaned"
// Run the pipeline with the expected inputs.
var pipelineCommand = ("-jobProject %s -S scala/qscript/fullCallingPipeline.q -Y %s" +
" -refseqTable %s" +
" --gatkjar %s/dist/GenomeAnalysisTK.jar -titv %s -skipCleaning")
.format(projectName, yamlFile, dataset.refseq, new File(".").getCanonicalPath, dataset.targetTiTv)
var pipelineCommand = ("-retry 1 -S scala/qscript/fullCallingPipeline.q" +
" -jobProject %s -Y %s -refseqTable %s -titv %s" +
" --gatkjar %s/dist/GenomeAnalysisTK.jar")
.format(projectName, yamlFile, dataset.refseq, dataset.targetTiTv, new File(".").getAbsolutePath)
if (!dataset.runIndelRealigner) {
pipelineCommand += " -skipCleaning"
cleanType = "uncleaned"
}
if (dataset.jobQueue != null)
pipelineCommand += " -jobQueue " + dataset.jobQueue
@ -98,25 +111,22 @@ class FullCallingPipelineTest extends BaseTest {
// If actually running, evaluate the output validating the expressions.
if (PipelineTest.run) {
// path where the pipeline should have output the uncleaned handfiltered vcf
val handFilteredVcf = PipelineTest.runDir(testName) + "SnpCalls/%s.uncleaned.annotated.handfiltered.vcf".format(projectName)
// path where the pipeline should have outout the indel masked vcf
val optimizedVcf = PipelineTest.runDir(testName) + "SnpCalls/%s.uncleaned.annotated.indel.masked.recalibrated.tranched.vcf".format(projectName)
// path where the pipeline should have output the handfiltered vcf
val handFilteredVcf = PipelineTest.runDir(testName) + "SnpCalls/%s.%s.annotated.handfiltered.vcf".format(projectName, cleanType)
// eval modules to record in the validation directory
val evalModules = List("CompOverlap", "CountFunctionalClasses", "CountVariants", "SimpleMetricsBySample", "TiTvVariantEvaluator")
// write the report to the shared validation data location
val formatter = new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss")
val reportLocation = "%s/%s/validation.%s.eval".format(validationReportsDataLocation, testName, formatter.format(new Date))
val reportLocation = "%s%s/validation.%s.eval".format(validationReportsDataLocation, testName, formatter.format(new Date))
new File(reportLocation).getParentFile.mkdirs
// Run variant eval generating the report and validating the pipeline vcfs.
var walkerCommand = ("-T VariantEval -R %s -D %s -B:evalOptimized,VCF %s -B:evalHandFiltered,VCF %s" +
var walkerCommand = ("-T VariantEval -R %s -D %s -B:eval,VCF %s" +
" -E %s -reportType R -reportLocation %s -L %s")
.format(
dataset.pipeline.getProject.getReferenceFile, dataset.pipeline.getProject.getDbsnpFile, optimizedVcf, handFilteredVcf,
dataset.pipeline.getProject.getReferenceFile, dataset.pipeline.getProject.getDbsnpFile, handFilteredVcf,
evalModules.mkString(" -E "), reportLocation, dataset.pipeline.getProject.getIntervalList)
for (validation <- dataset.validations) {
@ -135,16 +145,24 @@ class FullCallingPipelineTest extends BaseTest {
var targetTiTv: String = null,
var validations: List[PipelineValidation] = Nil,
var jobQueue: String = null,
var bigMemQueue: String = null) {
var bigMemQueue: String = null,
var runIndelRealigner: Boolean = false) {
override def toString = pipeline.getProject.getName
}
class PipelineValidation(
var metric: String = null,
var min: String = null,
var max: String = null) {
class PipelineValidation(val metric: String, val min: String, val max: String) {
}
class IntegerValidation(metric: String, target: Int)
extends PipelineValidation(metric,
(target * .99).floor.toInt.toString, (target * 1.01).ceil.toInt.toString) {
}
class DoubleValidation(metric: String, target: Double)
extends PipelineValidation(metric,
"%.2f".format((target * 99).floor / 100), "%.2f".format((target * 101).ceil / 100)) {
}
private def writeTempYaml(pipeline: Pipeline) = {
val tempFile = File.createTempFile(pipeline.getProject.getName + "-", ".yaml")
tempFile.deleteOnExit

View File

@ -10,6 +10,7 @@
<ibiblio name="libraries_with_inconsistent_poms" checkconsistency="false" m2compatible="true" />
<ibiblio name="reflections-repo" m2compatible="true" root="http://reflections.googlecode.com/svn/repo" />
<ibiblio name="java.net" m2compatible="false" root="http://download.java.net/maven/1/" pattern="[organisation]/jars/[artifact]-[revision].[ext]"/>
<ibiblio name="maven2-repository.dev.java.net" m2compatible="true" root="http://download.java.net/maven/2/" />
</resolvers>
<modules>
<module organisation="edu.mit.broad" resolver="projects" />
@ -22,5 +23,6 @@
<module organisation="colt" module="colt" resolver="libraries_with_inconsistent_poms" />
<module organisation="javax.mail" resolver="java.net" />
<module organisation="javax.activation" resolver="java.net" />
<module organisation="net.java.dev.jna" resolver="maven2-repository.dev.java.net" />
</modules>
</ivysettings>