Added the ability to test pipelines in dry or live mode via 'ant pipelinetest' and 'ant pipelinetest -Dpipeline.run=run'.

Added an initial test for genotyping chr20 on ten 1000G bams.
Since tribble needs logging support too, for now setting the logging level and appending the console logger to the root logger, not just to "org.broadinstitute.sting".
Updated IntervalUtilsUnitTest to output to a temp directory and not the SVN controlled testdata directory.
Added refseq tables and dbsnps to validation data in BaseTest.
Now waiting up to two minutes for gather parts to propagate over NFS before attempting to merge the files.
Setting scatter/gather directories relative to the -run directory instead of the current directory in which Queue is running.
Fixed a bug where escaping test expressions didn't handle delimiters at the beginning or end of the String.


git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@4717 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
kshakir 2010-11-22 22:59:42 +00:00
parent 187b464ded
commit 787e5d85e9
18 changed files with 340 additions and 55 deletions

View File

@ -162,6 +162,11 @@
</or>
</condition>
<!-- Get the pipeline run type. Default to dry. -->
<condition property="pipeline.run" value="dry" else="${pipeline.run}">
<equals arg1="${pipeline.run}" arg2="$${pipeline.run}" />
</condition>
<echo message="GATK build : ${gatk.target}"/>
<echo message="Queue build : ${queue.target}"/>
<echo message="source revision : ${build.version}"/>
@ -507,6 +512,7 @@
listeners="org.testng.reporters.FailedReporter,org.testng.reporters.JUnitXMLReporter,org.broadinstitute.sting.StingTextReporter">
<jvmarg value="-Xmx${test.maxmemory}" />
<jvmarg value="-Djava.awt.headless=true" />
<jvmarg value="-Dpipeline.run=${pipeline.run}" />
<!--
<jvmarg value="-Xdebug"/>
<jvmarg value="-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005"/>
@ -554,6 +560,12 @@
</condition>
<run-test testtype="${ptype}"/>
</target>
<target name="pipelinetest" depends="test.compile" description="Run pipeline tests">
<condition property="pipetype" value="*PipelineTest" else="${single}">
<not><isset property="single"/></not>
</condition>
<run-test testtype="${pipetype}"/>
</target>
<!-- ***************************************************************************** -->
<!-- *********** Tribble ********* -->
@ -686,6 +698,7 @@
<delete dir="lib"/>
<delete dir="staging"/>
<delete dir="${dist.dir}"/>
<delete dir="pipelinetests"/>
<!-- When we clean, make sure to clean out tribble as well -->
<ant antfile="tribble/build.xml" target="clean"/>
</target>

View File

@ -288,7 +288,7 @@ public abstract class CommandLineProgram {
throw new ArgumentException("Unable to match: " + logging_level + " to a logging level, make sure it's a valid level (INFO, DEBUG, ERROR, FATAL, OFF)");
}
CommandLineUtils.getStingLogger().setLevel(par);
Logger.getRootLogger().setLevel(par);
}
/**

View File

@ -166,8 +166,8 @@ public class CommandLineUtils {
}
}
// Extracted from BasicConfigurator.configure(), but applied to the root logger (not just the Sting logger) so dependent libraries also log to the console.
getStingLogger().addAppender(new ConsoleAppender(
new PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN)));
Logger.getRootLogger().addAppender(new ConsoleAppender(
new PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN)));
}
/**
@ -177,8 +177,10 @@ public class CommandLineUtils {
*/
@SuppressWarnings("unchecked")
public static void setLayout(Logger logger, PatternLayout layout) {
Enumeration<Appender> e = (Enumeration<Appender>) logger.getAllAppenders();
for (Appender appender: Collections.list(e))
appender.setLayout(layout);
for (; logger != null; logger = (Logger)logger.getParent()) {
Enumeration<Appender> e = (Enumeration<Appender>) logger.getAllAppenders();
for (Appender appender: Collections.list(e))
appender.setLayout(layout);
}
}
}

View File

@ -337,11 +337,18 @@ public class Utils {
private static String[] escapeExpressions(String args, String delimiter) {
String[] command = {};
String[] split = args.split(delimiter);
String arg;
for (int i = 0; i < split.length - 1; i += 2) {
command = Utils.concatArrays(command, split[i].trim().split(" "));
arg = split[i].trim();
if (arg.length() > 0) // if the unescaped arg has a size
command = Utils.concatArrays(command, arg.split(" "));
command = Utils.concatArrays(command, new String[]{split[i + 1]});
}
return Utils.concatArrays(command, split[split.length - 1].trim().split(" "));
arg = split[split.length - 1].trim();
if (split.length % 2 == 1) // if the command does not end with a delimiter, a trailing unescaped segment remains
if (arg.length() > 0) // if the last unescaped arg has a size
command = Utils.concatArrays(command, arg.split(" "));
return command;
}
/**

View File

@ -50,6 +50,18 @@ public abstract class BaseTest {
public static final String validationDataLocation = GATKDataLocation + "Validation_Data/";
public static final String evaluationDataLocation = GATKDataLocation + "Evaluation_Data/";
public static final String comparisonDataLocation = GATKDataLocation + "Comparisons/";
public static final String annotationDataLocation = GATKDataLocation + "Annotations/";
public static final String refseqAnnotationLocation = annotationDataLocation + "refseq/";
public static final String hg18Refseq = refseqAnnotationLocation + "refGene-big-table-hg18.txt";
public static final String hg19Refseq = refseqAnnotationLocation + "refGene-big-table-hg19.txt";
public static final String b36Refseq = refseqAnnotationLocation + "refGene-big-table-b36.txt";
public static final String b37Refseq = refseqAnnotationLocation + "refGene-big-table-b37.txt";
public static final String dbsnpDataLocation = GATKDataLocation;
public static final String hg18dbSNP129 = dbsnpDataLocation + "dbsnp_129_hg18.rod";
public static final String b36dbSNP129 = dbsnpDataLocation + "dbsnp_129_b36.rod";
public static final String b37dbSNP129 = dbsnpDataLocation + "dbsnp_129_b37.rod";
public final String testDir = "testdata/";

View File

@ -315,15 +315,32 @@ public class WalkerTest extends BaseTest {
* @param md5s the list of md5s
* @param tmpFiles the temp file corresponding to the md5 list
* @param args the argument list
* @param expectedException the expected exception or null
* @return a pair of file and string lists
*/
private Pair<List<File>, List<String>> executeTest(String name, List<String> md5s, List<File> tmpFiles, String args, Class expectedException) {
if (outputFileLocation != null)
args += " -o " + this.outputFileLocation.getAbsolutePath();
executeTest(name, args, expectedException);
if ( expectedException != null ) {
return null;
} else {
// we need to check MD5s
return new Pair<List<File>, List<String>>(tmpFiles, assertMatchingMD5s(name, tmpFiles, md5s));
}
}
/**
* execute the test, given the following:
* @param name the name of the test
* @param args the argument list
* @param expectedException the expected exception or null
*/
public static void executeTest(String name, String args, Class expectedException) {
CommandLineGATK instance = new CommandLineGATK();
String[] command = Utils.escapeExpressions(args);
if (outputFileLocation != null)
command = Utils.appendArray(command, "-o", this.outputFileLocation.getAbsolutePath());
// add the logging level to each of the integration test commands
command = Utils.appendArray(command, "-l", "WARN", "-et", ENABLE_REPORTING ? "STANDARD" : "NO_ET");
@ -356,14 +373,10 @@ public class WalkerTest extends BaseTest {
if ( ! gotAnException )
// we expected an exception but didn't see it
Assert.fail(String.format("Test %s expected exception %s but none was thrown", name, expectedException.toString()));
return null;
} else {
if ( CommandLineExecutable.result != 0) {
throw new RuntimeException("Error running the GATK with arguments: " + args);
}
// we need to check MD5s
return new Pair<List<File>, List<String>>(tmpFiles, assertMatchingMD5s(name, tmpFiles, md5s));
}
}

View File

@ -82,4 +82,38 @@ public class UtilsUnitTest extends BaseTest {
Assert.assertTrue("one-1;two-2;three-1;four-2;five-1;six-2".equals(joined));
}
@Test
public void testEscapeExpressions() {
String[] expected, actual;
expected = new String[] {"one", "two", "three four", "five", "six"};
actual = Utils.escapeExpressions("one two 'three four' five six");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions(" one two 'three four' five six");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions("one two 'three four' five six ");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions(" one two 'three four' five six ");
Assert.assertEquals(actual, expected);
expected = new String[] {"one two", "three", "four"};
actual = Utils.escapeExpressions("'one two' three four");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions(" 'one two' three four");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions("'one two' three four ");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions(" 'one two' three four ");
Assert.assertEquals(actual, expected);
expected = new String[] {"one", "two", "three four"};
actual = Utils.escapeExpressions("one two 'three four'");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions(" one two 'three four'");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions("one two 'three four' ");
Assert.assertEquals(actual, expected);
actual = Utils.escapeExpressions(" one two 'three four' ");
Assert.assertEquals(actual, expected);
}
}

View File

@ -12,6 +12,7 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@ -191,6 +192,12 @@ public class IntervalUtilsUnitTest extends BaseTest {
Assert.assertEquals(locs3.get(0), chr3);
}
@Test(enabled=false) // disabled, GenomeLoc.compareTo() returns 0 for two locs with the same start, causing an exception in GLSS.add().
public void testScatterIntervalsWithTheSameStart() {
List<File> files = testFiles("sg.", 20, ".intervals");
IntervalUtils.scatterIntervalArguments(new File(hg18Reference), Arrays.asList(BaseTest.GATKDataLocation + "whole_exome_agilent_designed_120.targets.hg18.chr20.interval_list"), files, false);
}
@Test
public void testScatterOrder() {
List<String> intervals = Arrays.asList("chr2:1-1", "chr1:1-1", "chr3:2-2");
@ -348,9 +355,16 @@ public class IntervalUtilsUnitTest extends BaseTest {
}
private List<File> testFiles(String prefix, int count, String suffix) {
ArrayList<File> files = new ArrayList<File>();
for (int i = 1; i <= count; i++)
files.add(new File(testDir + prefix + i + suffix));
return files;
try {
ArrayList<File> files = new ArrayList<File>();
for (int i = 1; i <= count; i++) {
File tmpFile = File.createTempFile(prefix + i, suffix);
tmpFile.deleteOnExit();
files.add(tmpFile);
}
return files;
} catch (IOException e) {
throw new UserException.BadTmpDir("Unable to create temp file: " + e);
}
}
}

View File

@ -3,7 +3,6 @@ package org.broadinstitute.sting.queue.engine
import java.io.File
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.util._
import org.apache.commons.io.FileUtils
/**
* Runs jobs on an LSF compute cluster.
@ -154,7 +153,7 @@ class LsfJobRunner(val function: CommandLineFunction) extends DispatchJobRunner
*/
private def tailError() = {
val errorFile = if (job.errorFile != null) job.errorFile else job.outputFile
if (FileUtils.waitFor(errorFile, 120)) {
if (IOUtils.waitFor(errorFile, 120)) {
val tailLines = IOUtils.tail(errorFile, 100)
val nl = "%n".format()
logger.error("Last %d lines of %s:%n%s".format(tailLines.size, errorFile, tailLines.mkString(nl)))

View File

@ -11,6 +11,7 @@ import org.apache.commons.io.{LineIterator, IOUtils, FileUtils}
*/
class VcfGatherFunction extends GatherFunction with InProcessFunction {
def run() = {
waitForGatherParts
if (gatherParts.size < 1) {
throw new QException("No files to gather to output: " + originalOutput)
} else {

View File

@ -151,8 +151,7 @@ trait QFunction extends Logging {
def outputDirectories = {
var dirs = Set.empty[File]
dirs += commandDirectory
if (jobTempDir != null)
dirs += jobTempDir
dirs += jobTempDir
dirs ++= outputs.map(_.getParentFile)
dirs
}
@ -287,10 +286,18 @@ trait QFunction extends Logging {
if (jobTempDir == null)
jobTempDir = qSettings.tempDirectory
// If the command directory is relative, insert the run directory ahead of it.
commandDirectory = IOUtils.absolute(new File(qSettings.runDirectory, commandDirectory.getPath))
// Do not set the temp dir relative to the command directory
jobTempDir = IOUtils.absolute(jobTempDir)
absoluteCommandDirectory()
}
/**
* If the command directory is relative, insert the run directory ahead of it.
*/
def absoluteCommandDirectory() =
commandDirectory = IOUtils.absolute(qSettings.runDirectory, commandDirectory)
/**
* Makes all field values canonical so that the graph can match the
* inputs of one function to the output of another using equals().
@ -330,7 +337,6 @@ trait QFunction extends Logging {
*/
private def absolute(file: File) = IOUtils.absolute(commandDirectory, file)
/**
* Scala sugar type for checking annotation required and exclusiveOf.
*/

View File

@ -3,6 +3,8 @@ package org.broadinstitute.sting.queue.function.scattergather
import java.io.File
import org.broadinstitute.sting.commandline.{Input, Output}
import org.broadinstitute.sting.queue.function.QFunction
import org.broadinstitute.sting.queue.QException
import org.broadinstitute.sting.queue.util.IOUtils
/**
* Base class for Gather command line functions.
@ -13,4 +15,13 @@ trait GatherFunction extends QFunction {
@Output(doc="The original output of the scattered function")
var originalOutput: File = _
/**
* Waits for gather parts to propagate over NFS or throws an exception.
*/
protected def waitForGatherParts = {
val missing = IOUtils.waitFor(gatherParts, 120)
if (!missing.isEmpty)
throw new QException("Unable to find gather inputs: " + missing.mkString(", "))
}
}

View File

@ -121,6 +121,10 @@ trait ScatterGatherableFunction extends CommandLineFunction {
// Allow the script writer to change the paths to the files.
initCloneFunction(cloneFunction, i)
// If the command directory is relative, insert the run directory ahead of it.
cloneFunction.absoluteCommandDirectory()
// Get absolute paths to the files and bind the sg functions to the clone function via the absolute paths.
scatterFunction.bindCloneInputs(cloneFunction, i)
for (gatherField <- outputFieldsWithValues) {

View File

@ -10,6 +10,7 @@ import org.apache.commons.io.{LineIterator, IOUtils, FileUtils}
*/
class SimpleTextGatherFunction extends GatherFunction with InProcessFunction {
def run() = {
waitForGatherParts
if (gatherParts.size < 1) {
throw new QException("No files to gather to output: " + originalOutput)
} else if (gatherParts.size == 1) {

View File

@ -41,14 +41,67 @@ object IOUtils extends Logging {
absolute(temp)
}
/**
* Writes content into a file.
* @param file File to write to.
* @param content Content to write.
*/
def writeContents(file: File, content: String) = FileUtils.writeStringToFile(file, content)
def writeTempFile(content: String, prefix: String, suffix: String = "", directory: File) = {
/**
* Writes content to a temp file and returns the path to the temporary file.
* @param content Content to write.
* @param prefix Prefix for the temp file.
* @param suffix Suffix for the temp file.
* @param directory Directory for the temp file.
* @return the path to the temp file.
*/
def writeTempFile(content: String, prefix: String, suffix: String, directory: File) = {
val tempFile = absolute(File.createTempFile(prefix, suffix, directory))
writeContents(tempFile, content)
tempFile
}
/**
* Waits for NFS to propagate a file creation, imposing a timeout.
*
* Based on Apache Commons IO FileUtils.waitFor()
*
* @param file The file to wait for.
* @param seconds The maximum time in seconds to wait.
* @return true if the file exists
*/
def waitFor(file: File, seconds: Int): Boolean = waitFor(List(file), seconds).isEmpty
/**
* Waits for NFS to propagate a file creation, imposing a timeout.
*
* Based on Apache Commons IO FileUtils.waitFor()
*
* @param files The list of files to wait for.
* @param seconds The maximum time in seconds to wait.
* @return Files that still do not exist at the end of the timeout, or an empty list if all files exist.
*/
def waitFor[T <: Traversable[File]](files: T, seconds: Int): Traversable[File] = {
var timeout = 0;
var tick = 0;
var missingFiles = files.filterNot(_.exists)
while (!missingFiles.isEmpty && timeout <= seconds) {
if (tick >= 10) {
tick = 0;
timeout += 1
}
tick += 1
try {
Thread.sleep(100)
} catch {
case ignore: InterruptedException =>
}
missingFiles = missingFiles.filterNot(_.exists)
}
missingFiles
}
/**
* Returns the directory at the number of levels deep.
* For example 2 levels of /path/to/dir will return /path/to

View File

@ -1,19 +0,0 @@
package org.broadinstitute.sting.queue
import org.testng.annotations.Test
import org.broadinstitute.sting.BaseTest
/**
* A temporary integration test to ensure that the full calling pipeline compiles. To be enhanced...
*/
class FullCallingPipelineIntegrationTest extends QScriptTest {
@Test
def testCompileFullCallingPipeline = {
val command = ("-jobProject QueuePipelineTest -S %1$sscala/qscript/fullCallingPipeline.q -Y %2$s " +
"-refseqTable /humgen/gsa-hpprojects/GATK/data/Annotations/refseq/refGene-big-table-hg18.txt " +
"--gatkjar %1$sdist/GenomeAnalysisTK.jar -titv 3.0 -skipCleaning").format(
stingDir, BaseTest.validationDataLocation + "QueuePipelineTestData/QueuePipelineTestData.yaml"
)
executeTest("fullCallingPipeline", command, null)
}
}

View File

@ -0,0 +1,132 @@
package org.broadinstitute.sting.queue.pipeline
import org.testng.annotations.{DataProvider, Test}
import collection.JavaConversions._
import java.io.File
import org.broadinstitute.sting.datasources.pipeline.{PipelineSample, PipelineProject, Pipeline}
import org.broadinstitute.sting.utils.yaml.YamlUtils
import org.broadinstitute.sting.queue.PipelineTest
import org.broadinstitute.sting.{WalkerTest, BaseTest}
import java.text.SimpleDateFormat
import java.util.Date
class FullCallingPipelineTest extends BaseTest {
def datasets = List(k1gChr20Dataset)
private final val validationReportsDataLocation = "/humgen/gsa-hpprojects/GATK/validationreports/submitted/"
val k1gChr20Dataset = {
val project = new PipelineProject
project.setName("Barcoded_1000G_WEx_chr20")
project.setReferenceFile(new File(BaseTest.hg19Reference))
project.setDbsnpFile(new File(BaseTest.b37dbSNP129))
project.setIntervalList(new File(BaseTest.GATKDataLocation + "whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.chr20.interval_list"))
val squid = "C426"
val ids = List(
"NA19651","NA19655","NA19669","NA19834","HG01440",
"NA12342","NA12748","NA19649","NA19652","NA19654")
var samples = List.empty[PipelineSample]
for (id <- ids) {
val sample = new PipelineSample
sample.setId(project.getName + "_" + id)
sample.setBamFiles(Map("recalibrated" -> new File("/seq/picard_aggregation/%1$s/%2$s/v6/%2$s.bam".format(squid,id))))
sample.setTags(Map("SQUIDProject" -> squid, "CollaboratorID" -> id))
samples :+= sample
}
val pipeline = new Pipeline
pipeline.setProject(project)
pipeline.setSamples(samples)
val dataset = new PipelineDataset
dataset.pipeline = pipeline
dataset.refseq = BaseTest.hg19Refseq
dataset.targetTiTv = "3.0"
dataset.jobQueue = "hour"
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.all.counter.nCalledLoci", "1390", "1420")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.all.titv.tiTvRatio", "3.52", "3.60")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.known.titv.tiTvRatio", "3.71", "3.80")
dataset.validations :+= new PipelineValidation("evalHandFiltered.dbsnp.all.called.novel.titv.tiTvRatio", "2.79", "2.86")
dataset
}
@DataProvider(name="datasets")
final def convertDatasets: Array[Array[AnyRef]] =
datasets.map(dataset => Array(dataset.asInstanceOf[AnyRef])).toArray
@Test(dataProvider="datasets")
def testFullCallingPipeline(dataset: PipelineDataset) = {
val projectName = dataset.pipeline.getProject.getName
val testName = "fullCallingPipeline-" + projectName
val yamlFile = writeTempYaml(dataset.pipeline)
// Run the pipeline with the expected inputs.
var pipelineCommand = ("-jobProject %s -S scala/qscript/fullCallingPipeline.q -Y %s" +
" -refseqTable %s" +
" --gatkjar %s/dist/GenomeAnalysisTK.jar -titv %s -skipCleaning")
.format(projectName, yamlFile, dataset.refseq, new File(".").getCanonicalPath, dataset.targetTiTv)
if (dataset.jobQueue != null)
pipelineCommand += " -jobQueue " + dataset.jobQueue
// Run the test, at least checking if the command compiles
PipelineTest.executeTest(testName, pipelineCommand, null)
// If actually running, evaluate the output validating the expressions.
if (PipelineTest.run) {
// path where the pipeline should have output the uncleaned handfiltered vcf
val handFilteredVcf = PipelineTest.runDir(testName) + "SnpCalls/%s.uncleaned.annotated.handfiltered.vcf".format(projectName)
// path where the pipeline should have output the indel masked vcf
val optimizedVcf = PipelineTest.runDir(testName) + "SnpCalls/%s.uncleaned.annotated.indel.masked.recalibrated.tranched.vcf".format(projectName)
// eval modules to record in the validation directory
val evalModules = List("CountFunctionalClasses", "CompOverlap", "CountVariants", "TiTvVariantEvaluator")
// write the report to the shared validation data location
val formatter = new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss")
val reportLocation = "%s/%s/validation.%s.eval".format(validationReportsDataLocation, testName, formatter.format(new Date))
new File(reportLocation).getParentFile.mkdirs
// Run variant eval generating the report and validating the pipeline vcfs.
var walkerCommand = ("-T VariantEval -R %s -D %s -B:evalOptimized,VCF %s -B:evalHandFiltered,VCF %s" +
" -E %s -reportType R -reportLocation %s -L %s")
.format(
dataset.pipeline.getProject.getReferenceFile, dataset.pipeline.getProject.getDbsnpFile, optimizedVcf, handFilteredVcf,
evalModules.mkString(" -E "), reportLocation, dataset.pipeline.getProject.getIntervalList)
for (validation <- dataset.validations) {
walkerCommand += " -summary %s".format(validation.metric)
walkerCommand += " -validate '%1$s >= %2$s' -validate '%1$s <= %3$s'".format(
validation.metric, validation.min, validation.max)
}
WalkerTest.executeTest("fullCallingPipelineValidate-" + projectName, walkerCommand, null)
}
}
class PipelineDataset(
var pipeline: Pipeline = null,
var refseq: String = null,
var targetTiTv: String = null,
var validations: List[PipelineValidation] = Nil,
var jobQueue: String = null) {
override def toString = pipeline.getProject.getName
}
class PipelineValidation(
var metric: String = null,
var min: String = null,
var max: String = null) {
}
private def writeTempYaml(pipeline: Pipeline) = {
val tempFile = File.createTempFile(pipeline.getProject.getName + "-", ".yaml")
tempFile.deleteOnExit
YamlUtils.dump(pipeline, tempFile)
tempFile
}
}

View File

@ -3,12 +3,12 @@ package org.broadinstitute.sting.queue
import org.broadinstitute.sting.utils.Utils
import org.testng.Assert
import org.broadinstitute.sting.commandline.CommandLineProgram
import org.broadinstitute.sting.BaseTest
import org.broadinstitute.sting.queue.util.ProcessController
class QScriptTest extends BaseTest {
object PipelineTest {
private var runningCommandLines = Set.empty[QCommandLine]
protected val stingDir = "./"
val run = System.getProperty("pipeline.run") == "run"
/**
* execute the test
@ -21,13 +21,15 @@ class QScriptTest extends BaseTest {
// add the logging level to each of the integration test commands
command = Utils.appendArray(command, "-l", "WARN", "-startFromScratch", "-tempDir", "integrationtests")
command = Utils.appendArray(command, "-l", "WARN", "-startFromScratch", "-tempDir", tempDir(name), "-runDir", runDir(name))
if (run)
command = Utils.appendArray(command, "-run", "-bsub")
// run the executable
var gotAnException = false
val instance = new QCommandLine
QScriptTest.runningCommandLines += instance
runningCommandLines += instance
try {
println("Executing test %s with Queue arguments: %s".format(name, Utils.join(" ",command)))
CommandLineProgram.start(instance, command)
@ -51,7 +53,7 @@ class QScriptTest extends BaseTest {
}
} finally {
instance.shutdown()
QScriptTest.runningCommandLines -= instance
runningCommandLines -= instance
}
// catch failures from the integration test
@ -64,10 +66,10 @@ class QScriptTest extends BaseTest {
throw new RuntimeException("Error running the GATK with arguments: " + args)
}
}
}
object QScriptTest {
private var runningCommandLines = Set.empty[QCommandLine]
def testDir(testName: String) = "pipelinetests/%s/".format(testName)
def runDir(testName: String) = testDir(testName) + "run/"
def tempDir(testName: String) = testDir(testName) + "temp/"
Runtime.getRuntime.addShutdownHook(new Thread {
/** Cleanup as the JVM shuts down. */