Merge branch 'master' of ssh://nickel.broadinstitute.org/humgen/gsa-scr1/gsa-engineering/git/unstable

This commit is contained in:
Guillermo del Angel 2011-08-22 20:39:32 -04:00
commit c270384b2e
43 changed files with 3059 additions and 476 deletions

View File

@ -374,7 +374,7 @@ class RodBindingArgumentTypeDescriptor extends ArgumentTypeDescriptor {
FeatureManager.FeatureDescriptor featureDescriptor = manager.getByFiletype(file);
if ( featureDescriptor != null ) {
tribbleType = featureDescriptor.getName();
logger.warn("Dynamically determined type of " + file + " to be " + tribbleType);
logger.info("Dynamically determined type of " + file + " to be " + tribbleType);
}
}

View File

@ -961,7 +961,7 @@ public class GenomeAnalysisEngine {
/**
* Get the list of intervals passed to the engine.
* @return List of intervals.
* @return List of intervals, or null if no intervals are in use
*/
public GenomeLocSortedSet getIntervals() {
return this.intervals;

View File

@ -311,7 +311,8 @@ public class VariantEvalWalker extends RodWalker<Integer, Integer> implements Tr
// for each comp track
for ( final RodBinding<VariantContext> compRod : comps ) {
// no sample stratification for comps
final Set<VariantContext> compSet = compVCs.get(compRod) == null ? new HashSet<VariantContext>(0) : compVCs.get(compRod).values().iterator().next();
final HashMap<String, Set<VariantContext>> compSetHash = compVCs.get(compRod);
final Set<VariantContext> compSet = (compSetHash == null || compSetHash.size() == 0) ? new HashSet<VariantContext>(0) : compVCs.get(compRod).values().iterator().next();
// find the comp
final VariantContext comp = findMatchingComp(eval, compSet);

View File

@ -347,9 +347,9 @@ public class VariantEvalUtils {
}
}
}
bindings.put(track, mapping);
}
bindings.put(track, mapping);
}
return bindings;

View File

@ -207,6 +207,7 @@ public class GaussianMixtureModel {
for( final boolean isNull : datum.isNull ) {
if( isNull ) { return evaluateDatumMarginalized( datum ); }
}
// Fill an array with the log10 probability coming from each Gaussian and then use MathUtils to sum them up correctly
final double[] pVarInGaussianLog10 = new double[gaussians.size()];
int gaussianIndex = 0;
for( final MultivariateGaussian gaussian : gaussians ) {
@ -215,6 +216,7 @@ public class GaussianMixtureModel {
return MathUtils.log10sumLog10(pVarInGaussianLog10); // Sum(pi_k * p(v|n,k))
}
// Used only to decide which covariate dimension is most divergent in order to report in the culprit info field annotation
public Double evaluateDatumInOneDimension( final VariantDatum datum, final int iii ) {
if(datum.isNull[iii]) { return null; }
@ -229,7 +231,7 @@ public class GaussianMixtureModel {
}
public double evaluateDatumMarginalized( final VariantDatum datum ) {
int numSamples = 0;
int numRandomDraws = 0;
double sumPVarInGaussian = 0.0;
final int numIterPerMissingAnnotation = 10; // Trade off here between speed of computation and accuracy of the marginalization
final double[] pVarInGaussianLog10 = new double[gaussians.size()];
@ -248,10 +250,10 @@ public class GaussianMixtureModel {
// add this sample's probability to the pile in order to take an average in the end
sumPVarInGaussian += Math.pow(10.0, MathUtils.log10sumLog10(pVarInGaussianLog10)); // p = 10 ^ Sum(pi_k * p(v|n,k))
numSamples++;
numRandomDraws++;
}
}
}
return Math.log10( sumPVarInGaussian / ((double) numSamples) );
return Math.log10( sumPVarInGaussian / ((double) numRandomDraws) );
}
}

View File

@ -233,13 +233,15 @@ public class VariantDataManager {
}
public void parseTrainingSets( final RefMetaDataTracker tracker, final GenomeLoc genomeLoc, final VariantContext evalVC, final VariantDatum datum, final boolean TRUST_ALL_POLYMORPHIC, final HashMap<String, Double> rodToPriorMap,
final List<RodBinding<VariantContext>> training, final List<RodBinding<VariantContext>> truth, final List<RodBinding<VariantContext>> known, final List<RodBinding<VariantContext>> badSites) {
final List<RodBinding<VariantContext>> training, final List<RodBinding<VariantContext>> truth, final List<RodBinding<VariantContext>> known, final List<RodBinding<VariantContext>> badSites, final List<RodBinding<VariantContext>> resource) {
datum.isKnown = false;
datum.atTruthSite = false;
datum.atTrainingSite = false;
datum.atAntiTrainingSite = false;
datum.prior = 2.0;
//BUGBUG: need to clean this up
for( final RodBinding<VariantContext> rod : training ) {
for( final VariantContext trainVC : tracker.getValues(rod, genomeLoc) ) {
if( isValidVariant( evalVC, trainVC, TRUST_ALL_POLYMORPHIC ) ) {
@ -264,6 +266,13 @@ public class VariantDataManager {
}
}
}
for( final RodBinding<VariantContext> rod : resource ) {
for( final VariantContext trainVC : tracker.getValues(rod, genomeLoc) ) {
if( isValidVariant( evalVC, trainVC, TRUST_ALL_POLYMORPHIC ) ) {
datum.prior = Math.max( datum.prior, (rodToPriorMap.containsKey(rod.getName()) ? rodToPriorMap.get(rod.getName()) : 0.0) );
}
}
}
for( final RodBinding<VariantContext> rod : badSites ) {
for( final VariantContext trainVC : tracker.getValues(rod, genomeLoc) ) {
if( trainVC != null ) {

View File

@ -138,6 +138,12 @@ public class VariantRecalibrator extends RodWalker<ExpandingArrayList<VariantDat
@Input(fullName="badSites", shortName = "badSites", doc="A list of known bad variants used to supplement training the negative model", required=false)
public List<RodBinding<VariantContext>> badSites = Collections.emptyList();
/**
* Any set of sites for which you would like to apply a prior probability but for which you don't want to use as training, truth, or known sites.
*/
@Input(fullName="resource", shortName = "resource", doc="A list of sites for which to apply a prior probability of being correct but which aren't used by the algorithm", required=false)
public List<RodBinding<VariantContext>> resource = Collections.emptyList();
/////////////////////////////
// Outputs
/////////////////////////////
@ -226,6 +232,7 @@ public class VariantRecalibrator extends RodWalker<ExpandingArrayList<VariantDat
allInputBindings.addAll(training);
allInputBindings.addAll(known);
allInputBindings.addAll(badSites);
allInputBindings.addAll(resource);
for( final RodBinding<VariantContext> rod : allInputBindings ) {
try {
rodToPriorMap.put(rod.getName(), (rod.getTags().containsKey("prior") ? Double.parseDouble(rod.getTags().getValue("prior")) : 0.0) );
@ -263,9 +270,9 @@ public class VariantRecalibrator extends RodWalker<ExpandingArrayList<VariantDat
datum.isTransition = datum.isSNP && VariantContextUtils.isTransition(vc);
// Loop through the training data sets and if they overlap this loci then update the prior and training status appropriately
dataManager.parseTrainingSets( tracker, context.getLocation(), vc, datum, TRUST_ALL_POLYMORPHIC, rodToPriorMap, training, truth, known, badSites );
dataManager.parseTrainingSets( tracker, context.getLocation(), vc, datum, TRUST_ALL_POLYMORPHIC, rodToPriorMap, training, truth, known, badSites, resource ); // BUGBUG: need to clean this up to be a class, not a list of all the rod bindings
double priorFactor = QualityUtils.qualToProb( datum.prior );
//if( PERFORM_PROJECT_CONSENSUS ) {
//if( PERFORM_PROJECT_CONSENSUS ) { // BUGBUG: need to resurrect this functionality?
// final double consensusPrior = QualityUtils.qualToProb( 1.0 + 5.0 * datum.consensusCount );
// priorFactor = 1.0 - ((1.0 - priorFactor) * (1.0 - consensusPrior));
//}

View File

@ -26,6 +26,7 @@
package org.broadinstitute.sting.gatk.walkers.variantrecalibration;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.utils.exceptions.UserException;
import java.util.List;
@ -43,6 +44,7 @@ public class VariantRecalibratorEngine {
/////////////////////////////
protected final static Logger logger = Logger.getLogger(VariantRecalibratorEngine.class);
public final static double MIN_ACCEPTABLE_LOD_SCORE = -20000.0;
// the unified argument collection
final private VariantRecalibratorArgumentCollection VRAC;
@ -72,13 +74,14 @@ public class VariantRecalibratorEngine {
for( final VariantDatum datum : data ) {
final double thisLod = evaluateDatum( datum, model );
if( Double.isNaN(thisLod) ) {
if( evaluateContrastively ) {
throw new UserException("NaN LOD value assigned. Clustering with this few variants and these annotations is unsafe. Please consider raising the number of variants used to train the negative model (via --percentBadVariants 0.05, for example) or lowering the maximum number of Gaussians to use in the model (via --maxGaussians 4, for example)");
} else {
throw new UserException("NaN LOD value assigned. Clustering with this few variants and these annotations is unsafe.");
}
throw new UserException("NaN LOD value assigned. Clustering with this few variants and these annotations is unsafe. Please consider raising the number of variants used to train the negative model (via --percentBadVariants 0.05, for example) or lowering the maximum number of Gaussians to use in the model (via --maxGaussians 4, for example)");
}
datum.lod = ( evaluateContrastively ? (datum.prior + datum.lod - thisLod) : thisLod );
datum.lod = ( evaluateContrastively ?
( Double.isInfinite(datum.lod) ? // positive model said negative infinity
( MIN_ACCEPTABLE_LOD_SCORE + GenomeAnalysisEngine.getRandomGenerator().nextDouble() * MIN_ACCEPTABLE_LOD_SCORE ) // Negative infinity lod values are possible when covariates are extremely far away from their tight Gaussians
: datum.prior + datum.lod - thisLod) // contrastive evaluation: (prior + positive model - negative model)
: thisLod ); // positive model only so set the lod and return
}
}

View File

@ -209,7 +209,7 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
* Note that sample exclusion takes precedence over inclusion, so that if a sample is in both lists it will be excluded.
*/
@Argument(fullName="exclude_sample_file", shortName="xl_sf", doc="File containing a list of samples (one per line) to exclude. Can be specified multiple times", required=false)
public Set<File> XLsampleFiles;
public Set<File> XLsampleFiles = new HashSet<File>(0);
/**
* Note that these expressions are evaluated *after* the specified samples are extracted and the INFO field annotations are updated.
@ -344,12 +344,10 @@ public class SelectVariants extends RodWalker<Integer, Integer> {
}
// now, exclude any requested samples
if (XLsampleFiles != null)
if(!XLsampleFiles.isEmpty()) {
Collection<String> XLsamplesFromFile = SampleUtils.getSamplesFromFiles(XLsampleFiles);
samples.removeAll(XLsamplesFromFile);
samples.removeAll(XLsampleNames);
}
Collection<String> XLsamplesFromFile = SampleUtils.getSamplesFromFiles(XLsampleFiles);
samples.removeAll(XLsamplesFromFile);
samples.removeAll(XLsampleNames);
if ( samples.size() == 0 && !NO_SAMPLES_SPECIFIED )
throw new UserException("All samples requested to be included were also requested to be excluded.");

View File

@ -0,0 +1,100 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import org.ggf.drmaa.DrmaaException;
import org.ggf.drmaa.JobInfo;
import java.util.Map;
/**
 * JNA mapping from Java to C DRMAA binding.
 *
 * Immutable snapshot of a completed job's status as returned by the native
 * drmaa_wait() call: exit state, signal state, core-dump flag, abort flag,
 * and the reported resource usage.
 */
public class JnaJobInfo implements JobInfo {
    // Identifier of the job this status snapshot describes.
    private final String jobId;
    // Resource usage reported by the DRM, as name -> value strings; may be null.
    private final Map<String, String> resourceUsage;
    // True when the job terminated normally; exitCode is only meaningful then.
    private final boolean exited;
    private final int exitCode;
    // True when the job was terminated by a signal; signalName is only meaningful then.
    private final boolean signaled;
    private final String signalName;
    private final boolean coreDumped;
    private final boolean aborted;

    /**
     * Captures the results of waiting on a single job.
     *
     * @param jobId             id of the finished job
     * @param rusage            resource usage map from drmaa_wait (may be null)
     * @param hasExited         whether the job exited normally
     * @param exitStatus        exit code, valid only when hasExited is true
     * @param hasSignaled       whether the job died on a signal
     * @param terminatingSignal signal name, valid only when hasSignaled is true
     * @param hasCoreDump       whether a core file was produced
     * @param wasAborted        whether the job was aborted before completion
     */
    public JnaJobInfo(String jobId, Map<String, String> rusage, boolean hasExited, int exitStatus, boolean hasSignaled, String terminatingSignal, boolean hasCoreDump, boolean wasAborted) {
        this.jobId = jobId;
        this.resourceUsage = rusage;
        this.exited = hasExited;
        this.exitCode = exitStatus;
        this.signaled = hasSignaled;
        this.signalName = terminatingSignal;
        this.coreDumped = hasCoreDump;
        this.aborted = wasAborted;
    }

    @Override
    public String getJobId() throws DrmaaException {
        return this.jobId;
    }

    @Override
    public Map getResourceUsage() throws DrmaaException {
        return this.resourceUsage;
    }

    @Override
    public boolean hasExited() throws DrmaaException {
        return this.exited;
    }

    /** @throws IllegalStateException if the job did not exit normally */
    @Override
    public int getExitStatus() throws DrmaaException {
        if (this.exited) {
            return this.exitCode;
        }
        throw new IllegalStateException("job has not exited");
    }

    @Override
    public boolean hasSignaled() throws DrmaaException {
        return this.signaled;
    }

    /** @throws IllegalStateException if the job was not terminated by a signal */
    @Override
    public String getTerminatingSignal() throws DrmaaException {
        if (this.signaled) {
            return this.signalName;
        }
        throw new IllegalStateException("job has not signaled");
    }

    @Override
    public boolean hasCoreDump() throws DrmaaException {
        return this.coreDumped;
    }

    @Override
    public boolean wasAborted() throws DrmaaException {
        return this.aborted;
    }
}

View File

@ -0,0 +1,315 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import com.sun.jna.Pointer;
import org.ggf.drmaa.*;
import java.util.*;
/**
 * JNA mapping from Java to C DRMAA binding.
 *
 * Wraps a native DRMAA job-template pointer. Every getter/setter delegates to
 * the static attribute helpers on JnaSession, which operate on the wrapped
 * pointer via the C drmaa_*_attribute calls; attribute names come from the
 * LibDrmaa constants.
 */
public class JnaJobTemplate implements JobTemplate {
    // Session that allocated this template; participates in equals() only.
    private final JnaSession session;
    // Native drmaa job template handle.
    private final Pointer jt;

    public JnaJobTemplate(JnaSession session, Pointer jt) {
        this.session = session;
        this.jt = jt;
    }

    /** Raw native template pointer, for passing back into LibDrmaa calls. */
    public Pointer getPointer() {
        return jt;
    }

    @Override
    public void setRemoteCommand(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND, s);
    }

    @Override
    public String getRemoteCommand() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setArgs(List list) throws DrmaaException {
        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_ARGV, list);
    }

    @Override
    public List getArgs() throws DrmaaException {
        return JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_ARGV);
    }

    /**
     * Maps the Java state constant to the corresponding DRMAA submission-state
     * string before storing it.
     * @throws InvalidAttributeValueException if state is neither HOLD_STATE nor ACTIVE_STATE
     */
    @Override
    public void setJobSubmissionState(int state) throws DrmaaException {
        String stateString;
        if (state == JobTemplate.HOLD_STATE)
            stateString = LibDrmaa.DRMAA_SUBMISSION_STATE_HOLD;
        else if (state == JobTemplate.ACTIVE_STATE)
            stateString = LibDrmaa.DRMAA_SUBMISSION_STATE_ACTIVE;
        else
            throw new InvalidAttributeValueException("jobSubmissionState attribute is invalid");
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JS_STATE, stateString);
    }

    /**
     * Reads the DRMAA submission-state string and converts it back to the
     * Java constant.
     * @throws InvalidAttributeValueException if the stored value is unrecognized
     */
    @Override
    public int getJobSubmissionState() throws DrmaaException {
        int state;
        String stateString = JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JS_STATE);
        if (LibDrmaa.DRMAA_SUBMISSION_STATE_HOLD.equals(stateString))
            state = JobTemplate.HOLD_STATE;
        else if (LibDrmaa.DRMAA_SUBMISSION_STATE_ACTIVE.equals(stateString))
            state = JobTemplate.ACTIVE_STATE;
        else
            throw new InvalidAttributeValueException("jobSubmissionState attribute is invalid");
        return state;
    }

    // Environment is stored natively as a vector of "KEY=value" strings.
    @SuppressWarnings("unchecked")
    @Override
    public void setJobEnvironment(Map env) throws DrmaaException {
        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_ENV, JnaSession.mapToCollection(env));
    }

    @SuppressWarnings("unchecked")
    @Override
    public Map getJobEnvironment() throws DrmaaException {
        return JnaSession.collectionToMap(JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_ENV));
    }

    @Override
    public void setWorkingDirectory(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WD, s);
    }

    @Override
    public String getWorkingDirectory() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WD);
    }

    @Override
    public void setJobCategory(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOB_CATEGORY, s);
    }

    @Override
    public String getJobCategory() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOB_CATEGORY);
    }

    @Override
    public void setNativeSpecification(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_NATIVE_SPECIFICATION, s);
    }

    @Override
    public String getNativeSpecification() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_NATIVE_SPECIFICATION);
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setEmail(Set set) throws DrmaaException {
        JnaSession.setVectorAttribute(jt, LibDrmaa.DRMAA_V_EMAIL, set);
    }

    @SuppressWarnings("unchecked")
    @Override
    public Set getEmail() throws DrmaaException {
        return new LinkedHashSet<String>(JnaSession.getVectorAttribute(jt, LibDrmaa.DRMAA_V_EMAIL));
    }

    // Boolean is encoded natively as "1"/"0".
    @Override
    public void setBlockEmail(boolean b) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_BLOCK_EMAIL, b ? "1" : "0");
    }

    @Override
    public boolean getBlockEmail() throws DrmaaException {
        return "1".equals(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_BLOCK_EMAIL));
    }

    @Override
    public void setStartTime(PartialTimestamp partialTimestamp) throws DrmaaException {
        JnaSession.setPartialTime(jt, LibDrmaa.DRMAA_START_TIME, partialTimestamp);
    }

    @Override
    public PartialTimestamp getStartTime() throws DrmaaException {
        return JnaSession.getPartialTime(jt, LibDrmaa.DRMAA_START_TIME);
    }

    @Override
    public void setJobName(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOB_NAME, s);
    }

    @Override
    public String getJobName() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOB_NAME);
    }

    @Override
    public void setInputPath(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_INPUT_PATH, s);
    }

    @Override
    public String getInputPath() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_INPUT_PATH);
    }

    @Override
    public void setOutputPath(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH, s);
    }

    @Override
    public String getOutputPath() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH);
    }

    @Override
    public void setErrorPath(String s) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_ERROR_PATH, s);
    }

    @Override
    public String getErrorPath() throws DrmaaException {
        return JnaSession.getAttribute(jt, LibDrmaa.DRMAA_ERROR_PATH);
    }

    // Boolean is encoded natively as "y"/"n".
    @Override
    public void setJoinFiles(boolean b) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_JOIN_FILES, b ? "y" : "n");
    }

    @Override
    public boolean getJoinFiles() throws DrmaaException {
        return "y".equals(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_JOIN_FILES));
    }

    /**
     * Encodes the transfer mode as a flag string containing any of 'i', 'o',
     * 'e' for input/output/error stream transfer respectively.
     */
    @Override
    public void setTransferFiles(FileTransferMode fileTransferMode) throws DrmaaException {
        StringBuilder buf = new StringBuilder();
        if (fileTransferMode.getInputStream())
            buf.append('i');
        if (fileTransferMode.getOutputStream())
            buf.append('o');
        if (fileTransferMode.getErrorStream())
            buf.append('e');
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_TRANSFER_FILES, buf.toString());
    }

    /**
     * Decodes the 'i'/'o'/'e' flag string back into a FileTransferMode.
     * Returns null when the attribute has not been set.
     */
    @Override
    public FileTransferMode getTransferFiles() throws DrmaaException {
        String mode = JnaSession.getAttribute(jt, LibDrmaa.DRMAA_TRANSFER_FILES);
        if (mode == null)
            return null;
        FileTransferMode fileTransferMode = new FileTransferMode();
        fileTransferMode.setInputStream(mode.indexOf('i') >= 0);
        fileTransferMode.setOutputStream(mode.indexOf('o') >= 0);
        fileTransferMode.setErrorStream(mode.indexOf('e') >= 0);
        return fileTransferMode;
    }

    @Override
    public void setDeadlineTime(PartialTimestamp partialTimestamp) throws DrmaaException {
        JnaSession.setPartialTime(jt, LibDrmaa.DRMAA_DEADLINE_TIME, partialTimestamp);
    }

    @Override
    public PartialTimestamp getDeadlineTime() throws DrmaaException {
        return JnaSession.getPartialTime(jt, LibDrmaa.DRMAA_DEADLINE_TIME);
    }

    // Time limits are converted to/from the native string form by
    // JnaSession.formatLimit / parseLimit.
    @Override
    public void setHardWallclockTimeLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WCT_HLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getHardWallclockTimeLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WCT_HLIMIT));
    }

    @Override
    public void setSoftWallclockTimeLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_WCT_SLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getSoftWallclockTimeLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_WCT_SLIMIT));
    }

    @Override
    public void setHardRunDurationLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_DURATION_HLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getHardRunDurationLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_DURATION_HLIMIT));
    }

    @Override
    public void setSoftRunDurationLimit(long l) throws DrmaaException {
        JnaSession.setAttribute(jt, LibDrmaa.DRMAA_DURATION_SLIMIT, JnaSession.formatLimit(l));
    }

    @Override
    public long getSoftRunDurationLimit() throws DrmaaException {
        return JnaSession.parseLimit(JnaSession.getAttribute(jt, LibDrmaa.DRMAA_DURATION_SLIMIT));
    }

    @Override
    public Set getAttributeNames() throws DrmaaException {
        return JnaSession.getAttrNames();
    }

    /** Two templates are equal when they wrap the same pointer in the same session. */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof JnaJobTemplate))
            return false;
        JnaJobTemplate other = (JnaJobTemplate) obj;
        return this.jt.equals(other.jt) && this.session.equals(other.session);
    }

    // Hash on the pointer only; consistent with equals() because equal
    // templates necessarily share the same jt.
    @Override
    public int hashCode() {
        return jt.hashCode();
    }
}

View File

@ -0,0 +1,450 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import com.sun.jna.Memory;
import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.StringArray;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
import org.ggf.drmaa.*;
import java.text.ParseException;
import java.util.*;
/**
* JNA mapping from Java to C DRMAA binding.
* See: Java and C Binding Documents on http://drmaa.org
*/
public class JnaSession implements Session {
private static final PartialTimestampFormat PARTIAL_TIMESTAMP_FORMAT = new PartialTimestampFormat();
private static final ThreadLocal<Memory> threadError = new ThreadLocal<Memory>() {
@Override
protected Memory initialValue() {
return new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
}
};
@Override
public void init(String contact) throws DrmaaException {
checkError(LibDrmaa.drmaa_init(contact, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
@Override
public void exit() throws DrmaaException {
checkError(LibDrmaa.drmaa_exit(getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
@Override
public JobTemplate createJobTemplate() throws DrmaaException {
PointerByReference jtRef = new PointerByReference();
checkError(LibDrmaa.drmaa_allocate_job_template(jtRef, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
return new JnaJobTemplate(this, jtRef.getValue());
}
@Override
public void deleteJobTemplate(JobTemplate jobTemplate) throws DrmaaException {
JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
checkError(LibDrmaa.drmaa_delete_job_template(jnaJobTemplate.getPointer(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
@Override
public String runJob(JobTemplate jobTemplate) throws DrmaaException {
Memory jobId = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
checkError(LibDrmaa.drmaa_run_job(jobId, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, jnaJobTemplate.getPointer(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
return jobId.getString(0);
}
@Override
public List runBulkJobs(JobTemplate jobTemplate, int start, int end, int incr) throws DrmaaException {
PointerByReference jobIds = new PointerByReference();
JnaJobTemplate jnaJobTemplate = (JnaJobTemplate) jobTemplate;
checkError(LibDrmaa.drmaa_run_bulk_jobs(jobIds, jnaJobTemplate.getPointer(), start, end, incr, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
try {
return getJobIds(jobIds);
} finally {
releaseJobIds(jobIds);
}
}
@Override
public void control(String jobId, int action) throws DrmaaException {
checkError(LibDrmaa.drmaa_control(jobId, action, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
@SuppressWarnings("unchecked")
@Override
public void synchronize(List list, long timeout, boolean dispose) throws DrmaaException {
StringArray jobIds = new StringArray((String[]) list.toArray(new String[list.size()]));
checkError(LibDrmaa.drmaa_synchronize(jobIds, new NativeLong(timeout), dispose ? 1 : 0, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
@Override
public JobInfo wait(String jobId, long timeout) throws DrmaaException {
Memory jobIdOut = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
IntByReference stat = new IntByReference();
PointerByReference rusage = new PointerByReference();
IntByReference exited = new IntByReference();
IntByReference exitStatus = new IntByReference();
IntByReference signaled = new IntByReference();
Memory signal = new Memory(LibDrmaa.DRMAA_SIGNAL_BUFFER);
IntByReference coreDumped = new IntByReference();
IntByReference aborted = new IntByReference();
int errnum;
errnum = LibDrmaa.drmaa_wait(jobId, jobIdOut, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, stat, new NativeLong(timeout), rusage, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
Map<String, String> rusageMap;
if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_RUSAGE) {
rusageMap = null;
} else {
try {
rusageMap = collectionToMap(getAttrValues(rusage));
} finally {
releaseAttrValues(rusage);
}
}
checkError(LibDrmaa.drmaa_wifexited(exited, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
if (exited.getValue() != 0) {
checkError(LibDrmaa.drmaa_wexitstatus(exitStatus, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
checkError(LibDrmaa.drmaa_wifsignaled(signaled, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
if (signaled.getValue() != 0) {
checkError(LibDrmaa.drmaa_wtermsig(signal, LibDrmaa.DRMAA_SIGNAL_BUFFER_LEN, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
checkError(LibDrmaa.drmaa_wcoredump(coreDumped, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
checkError(LibDrmaa.drmaa_wifaborted(aborted, stat.getValue(), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
return new JnaJobInfo(jobIdOut.getString(0), rusageMap, exited.getValue() != 0, exitStatus.getValue(),
signaled.getValue() != 0, signal.getString(0), coreDumped.getValue() != 0, aborted.getValue() != 0);
}
@Override
public int getJobProgramStatus(String jobId) throws DrmaaException {
IntByReference remotePs = new IntByReference();
checkError(LibDrmaa.drmaa_job_ps(jobId, remotePs, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
return remotePs.getValue();
}
@Override
public String getContact() {
Memory contact = new Memory(LibDrmaa.DRMAA_CONTACT_BUFFER);
try {
checkError(LibDrmaa.drmaa_get_contact(contact, LibDrmaa.DRMAA_CONTACT_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
} catch (DrmaaException e) {
// DRMAA spec says this method should throw DrmaaException.
// Why doesn't interface implement this?
throw new RuntimeException(e);
}
return contact.getString(0);
}
@Override
public Version getVersion() {
IntByReference major = new IntByReference();
IntByReference minor = new IntByReference();
try {
checkError(LibDrmaa.drmaa_version(major, minor, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
} catch (DrmaaException e) {
// DRMAA spec says this method should throw DrmaaException.
// Why doesn't interface implement this?
throw new RuntimeException(e);
}
return new Version(major.getValue(), minor.getValue());
}
@Override
public String getDrmSystem() {
Memory drmSystem = new Memory(LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER);
try {
checkError(LibDrmaa.drmaa_get_DRM_system(drmSystem, LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
} catch (DrmaaException e) {
// DRMAA spec says this method should throw DrmaaException.
// Why doesn't interface implement this?
throw new RuntimeException(e);
}
return drmSystem.getString(0);
}
@Override
public String getDrmaaImplementation() {
Memory drmaaImplementation = new Memory(LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER);
try {
checkError(LibDrmaa.drmaa_get_DRMAA_implementation(drmaaImplementation, LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
} catch (DrmaaException e) {
// DRMAA spec says this method should throw DrmaaException.
// Why doesn't interface implement this?
throw new RuntimeException(e);
}
return drmaaImplementation.getString(0);
}
public static void setAttribute(Pointer jt, String name, String value) throws DrmaaException {
checkError(LibDrmaa.drmaa_set_attribute(jt, name, value, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
public static String getAttribute(Pointer jt, String name) throws DrmaaException {
Memory attrBuffer = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
checkError(LibDrmaa.drmaa_get_attribute(jt, name, attrBuffer, LibDrmaa.DRMAA_ATTR_BUFFER_LEN, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
return attrBuffer.getString(0);
}
/** Sets a vector attribute on the native job template. */
public static void setVectorAttribute(Pointer jt, String name, Collection<String> values) throws DrmaaException {
    // JNA's StringArray marshals a Java String[] into the NULL-terminated
    // char** the C API expects.
    final String[] asArray = values.toArray(new String[values.size()]);
    checkError(LibDrmaa.drmaa_set_vector_attribute(jt, name, new StringArray(asArray), getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
}
/** Reads a vector attribute from the native job template. */
public static List<String> getVectorAttribute(Pointer jt, String name) throws DrmaaException {
    final PointerByReference valuesOut = new PointerByReference();
    checkError(LibDrmaa.drmaa_get_vector_attribute(jt, name, valuesOut, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
    try {
        return getAttrValues(valuesOut);
    } finally {
        // Always hand the opaque string vector back to the C library.
        releaseAttrValues(valuesOut);
    }
}
/**
 * Stores a PartialTimestamp attribute on the native job template, formatted
 * with PARTIAL_TIMESTAMP_FORMAT (the companion of getPartialTime below).
 */
public static void setPartialTime(Pointer jt, String name, PartialTimestamp partialTimestamp) throws DrmaaException {
    setAttribute(jt, name, PARTIAL_TIMESTAMP_FORMAT.format(partialTimestamp));
}
/**
 * Reads a PartialTimestamp attribute from the native job template, or null
 * when the attribute is unset.
 */
public static PartialTimestamp getPartialTime(Pointer jt, String name) throws DrmaaException {
    final String formatted = getAttribute(jt, name);
    if (formatted == null) {
        return null;
    }
    try {
        return PARTIAL_TIMESTAMP_FORMAT.parse(formatted);
    } catch (ParseException e) {
        // NOTE(review): the ParseException cause is dropped here because the
        // org.ggf.drmaa exception constructors take only a message.
        throw new InternalException(name + " property is unparsable");
    }
}
/** Returns the scalar attribute names supported by the DRMAA implementation. */
public static Set<String> getAttrNames() throws DrmaaException {
    final PointerByReference namesOut = new PointerByReference();
    checkError(LibDrmaa.drmaa_get_attribute_names(namesOut, getError(), LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN));
    try {
        // LinkedHashSet preserves the order in which the C library reported the names.
        return new LinkedHashSet<String>(getAttrNames(namesOut));
    } finally {
        releaseAttrNames(namesOut);
    }
}
/**
 * Flattens a map into a collection of "key=value" strings, preserving the
 * map's iteration order. Inverse of collectionToMap.
 */
public static Collection<String> mapToCollection(Map<String, String> map) {
    final Collection<String> entries = new LinkedHashSet<String>();
    for (final Map.Entry<String, String> mapEntry : map.entrySet()) {
        final String joined = mapEntry.getKey() + "=" + mapEntry.getValue();
        entries.add(joined);
    }
    return entries;
}
/**
 * Parses a collection of "key=value" strings into a map, preserving iteration
 * order. Null entries and entries with no '=' are silently skipped; only the
 * first '=' splits, so values may themselves contain '='.
 */
public static Map<String, String> collectionToMap(Collection<String> list) {
    final Map<String, String> map = new LinkedHashMap<String, String>();
    for (final String entry : list) {
        if (entry == null) {
            continue;
        }
        final int splitAt = entry.indexOf('=');
        if (splitAt >= 0) {
            map.put(entry.substring(0, splitAt), entry.substring(splitAt + 1));
        }
    }
    return map;
}
/**
 * Formats a duration in seconds as "H:MM:SS" (hours unpadded), the format
 * DRMAA wall-clock/duration limit attributes expect. Inverse of parseLimit.
 */
public static String formatLimit(long secs) {
    final long hours = secs / 3600;
    final long minutes = (secs % 3600) / 60;
    final long seconds = secs % 60;
    return String.format("%d:%02d:%02d", hours, minutes, seconds);
}
/**
 * Parses a colon-separated limit string ("SS", "MM:SS", "H:MM:SS", ...) into
 * a total number of seconds. Inverse of formatLimit.
 *
 * <p>Robustness fix: a null, empty, or blank limit now yields 0 instead of
 * throwing NumberFormatException ("".split(":") produces one empty token,
 * which Long.parseLong rejected). Empty tokens between colons are treated
 * as zero fields.</p>
 *
 * @param limit colon-separated duration, or null.
 * @return the duration in seconds; 0 when limit is null or blank.
 */
public static long parseLimit(String limit) {
    long seconds = 0;
    if (limit != null) {
        for (String token : limit.split(":")) {
            // Each additional field shifts the accumulated value by one base-60 place.
            seconds *= 60;
            final String trimmed = token.trim();
            if (trimmed.length() > 0) {
                seconds += Long.parseLong(trimmed);
            }
        }
    }
    return seconds;
}
/** Copies the C library's opaque attribute-name vector into a Java list. */
private static List<String> getAttrNames(PointerByReference names) throws DrmaaException {
    final List<String> collected = new ArrayList<String>();
    final IntByReference count = new IntByReference();
    int errnum = LibDrmaa.drmaa_get_num_attr_names(names.getValue(), count);
    checkError(errnum, "unable to get attribute names");
    // One scratch buffer is reused for every element of the vector.
    final Memory buffer = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
    final int total = count.getValue();
    for (int i = 1; i <= total; i++) {
        errnum = LibDrmaa.drmaa_get_next_attr_name(names.getValue(), buffer, LibDrmaa.DRMAA_ATTR_BUFFER_LEN);
        checkError(errnum, "unable to get attribute name " + i);
        // checkError() treats NO_MORE_ELEMENTS as success, so test for it explicitly.
        if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS) {
            break;
        }
        collected.add(buffer.getString(0));
    }
    return collected;
}
/** Copies the C library's opaque attribute-value vector into a Java list. */
private static List<String> getAttrValues(PointerByReference values) throws DrmaaException {
    final List<String> collected = new ArrayList<String>();
    final IntByReference count = new IntByReference();
    int errnum = LibDrmaa.drmaa_get_num_attr_values(values.getValue(), count);
    checkError(errnum, "unable to get attribute values");
    // One scratch buffer is reused for every element of the vector.
    final Memory buffer = new Memory(LibDrmaa.DRMAA_ATTR_BUFFER);
    final int total = count.getValue();
    for (int i = 1; i <= total; i++) {
        errnum = LibDrmaa.drmaa_get_next_attr_value(values.getValue(), buffer, LibDrmaa.DRMAA_ATTR_BUFFER_LEN);
        checkError(errnum, "unable to get attribute value " + i);
        // checkError() treats NO_MORE_ELEMENTS as success, so test for it explicitly.
        if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS) {
            break;
        }
        collected.add(buffer.getString(0));
    }
    return collected;
}
/** Copies the C library's opaque job-id vector into a Java list. */
private static List<String> getJobIds(PointerByReference jobIds) throws DrmaaException {
    final List<String> collected = new ArrayList<String>();
    final IntByReference count = new IntByReference();
    int errnum = LibDrmaa.drmaa_get_num_job_ids(jobIds.getValue(), count);
    checkError(errnum, "unable to get jobIds");
    // One scratch buffer (sized for job names) is reused for every element.
    final Memory buffer = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
    final int total = count.getValue();
    for (int i = 1; i <= total; i++) {
        errnum = LibDrmaa.drmaa_get_next_job_id(jobIds.getValue(), buffer, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN);
        checkError(errnum, "unable to get jobId " + i);
        // checkError() treats NO_MORE_ELEMENTS as success, so test for it explicitly.
        if (errnum == LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS) {
            break;
        }
        collected.add(buffer.getString(0));
    }
    return collected;
}
/** Returns the opaque attribute-name vector to the C library for freeing. */
private static void releaseAttrNames(PointerByReference names) throws DrmaaException {
    LibDrmaa.drmaa_release_attr_names(names.getValue());
}
/** Returns the opaque attribute-value vector to the C library for freeing. */
private static void releaseAttrValues(PointerByReference values) throws DrmaaException {
    LibDrmaa.drmaa_release_attr_values(values.getValue());
}
/** Returns the opaque job-id vector to the C library for freeing. */
private static void releaseJobIds(PointerByReference jobIds) throws DrmaaException {
    LibDrmaa.drmaa_release_job_ids(jobIds.getValue());
}
/**
 * Returns this thread's native buffer for DRMAA error diagnostics.
 * threadError is declared earlier in this class; the name and .get() call
 * suggest a ThreadLocal giving each thread its own buffer — confirm there.
 */
private static Memory getError() {
    return threadError.get();
}
/**
 * Validates a DRMAA return code, reading the diagnosis text from this
 * thread's error buffer when the code is not SUCCESS.
 *
 * @param errnum return code from a LibDrmaa call.
 * @throws DrmaaException for any code that checkError(int, String) maps to a failure.
 */
private static void checkError(int errnum) throws DrmaaException {
    if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
        checkError(errnum, getError().getString(0));
}
/**
 * Maps a DRMAA errno onto the org.ggf.drmaa checked exception hierarchy.
 *
 * <p>SUCCESS, NO_RUSAGE, and NO_MORE_ELEMENTS are not failures and return
 * normally; every other known code throws the corresponding exception, and
 * an unrecognized code is reported as IllegalArgumentException. The case
 * groupings mirror the sections of the DRMAA errno list in LibDrmaa.</p>
 *
 * @param errnum return code from a LibDrmaa call.
 * @param error diagnostic message to embed in the thrown exception.
 * @throws DrmaaException for any errno that denotes a failure.
 */
private static void checkError(int errnum, String error) throws DrmaaException {
    switch (errnum) {
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS:
            break;
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INTERNAL_ERROR:
            throw new InternalException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE:
            throw new DrmCommunicationException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_AUTH_FAILURE:
            throw new AuthorizationException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ARGUMENT:
            throw new IllegalArgumentException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_ACTIVE_SESSION:
            throw new NoActiveSessionException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MEMORY:
            throw new OutOfMemoryError(error);
        /* -------------- init and exit specific --------------- */
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_CONTACT_STRING:
            throw new InvalidContactStringException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR:
            throw new DefaultContactStringException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED:
            throw new NoDefaultContactStringException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRMS_INIT_FAILED:
            throw new DrmsInitException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_ALREADY_ACTIVE_SESSION:
            throw new AlreadyActiveSessionException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DRMS_EXIT_ERROR:
            throw new DrmsExitException(error);
        /* ---------------- job attributes specific -------------- */
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT:
            throw new InvalidAttributeFormatException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE:
            throw new InvalidAttributeValueException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES:
            throw new ConflictingAttributeValuesException(error);
        /* --------------------- job submission specific -------------- */
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_TRY_LATER:
            throw new TryLaterException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_DENIED_BY_DRM:
            throw new DeniedByDrmException(error);
        /* ------------------------------- job control specific ---------------- */
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_INVALID_JOB:
            throw new InvalidJobException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_RESUME_INCONSISTENT_STATE:
            throw new ResumeInconsistentStateException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE:
            throw new SuspendInconsistentStateException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_HOLD_INCONSISTENT_STATE:
            throw new HoldInconsistentStateException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE:
            throw new ReleaseInconsistentStateException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_EXIT_TIMEOUT:
            throw new ExitTimeoutException(error);
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_RUSAGE:
            break;
        case LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_NO_MORE_ELEMENTS:
            break;
        default:
            throw new IllegalArgumentException(String.format("Unknown error code %d: %s", errnum, error));
    }
}
}

View File

@ -0,0 +1,39 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import org.ggf.drmaa.Session;
import org.ggf.drmaa.SessionFactory;
/**
 * JNA mapping from Java to C DRMAA binding.
 *
 * <p>SessionFactory SPI entry point that hands out the JNA-backed
 * Session implementation defined in this package.</p>
 */
@SuppressWarnings("unused")
public class JnaSessionFactory extends SessionFactory {
    /**
     * Returns the DRMAA session backed by the native C library via JNA.
     * A new JnaSession is constructed on every call.
     */
    @Override
    public Session getSession() {
        return new JnaSession();
    }
}

View File

@ -0,0 +1,754 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/*___INFO__MARK_BEGIN__*/
/*************************************************************************
*
* The Contents of this file are made available subject to the terms of
* the Sun Industry Standards Source License Version 1.2
*
* Sun Microsystems Inc., March, 2001
*
*
* Sun Industry Standards Source License Version 1.2
* =================================================
* The contents of this file are subject to the Sun Industry Standards
* Source License Version 1.2 (the "License"); You may not use this file
* except in compliance with the License. You may obtain a copy of the
* License at http://gridengine.sunsource.net/Gridengine_SISSL_license.html
*
* Software provided under this License is provided on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
* WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS,
* MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING.
* See the License for the specific provisions governing your rights and
* obligations concerning the Software.
*
* The Initial Developer of the Original Code is: Sun Microsystems, Inc.
*
* Copyright: 2001 by Sun Microsystems, Inc.
*
* All Rights Reserved.
*
************************************************************************/
/*___INFO__MARK_END__*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import com.sun.jna.*;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
@SuppressWarnings("unused")
public class LibDrmaa {
static {
    // JNA direct mapping: bind the native "drmaa" shared library to the
    // static native methods declared on this class. If the library cannot
    // be found on the library path, this throws at class-load time.
    Native.register("drmaa");
}
/* see www.drmaa.org for more details on the DRMAA specification */
/****** DRMAA/-DRMAA_Interface *************************************************
* NAME
* DRMAA_Interface -- DRMAA interface
*
* FUNCTION
* The enlisted functions specify the C/C++ binding of the DRMAA interface
* specification.
*
* SEE ALSO
* DRMAA/drmaa_get_next_attr_name()
* DRMAA/drmaa_get_next_attr_value()
* DRMAA/drmaa_get_next_job_id()
* DRMAA/drmaa_release_attr_names()
* DRMAA/drmaa_release_attr_values()
* DRMAA/drmaa_release_job_ids()
* DRMAA/drmaa_init()
* DRMAA/drmaa_exit()
* DRMAA/drmaa_allocate_job_template()
* DRMAA/drmaa_delete_job_template()
* DRMAA/drmaa_set_attribute()
* DRMAA/drmaa_get_attribute()
* DRMAA/drmaa_set_vector_attribute()
* DRMAA/drmaa_get_vector_attribute()
* DRMAA/drmaa_get_attribute_names()
* DRMAA/drmaa_get_vector_attribute_names()
* DRMAA/drmaa_run_job()
* DRMAA/drmaa_run_bulk_jobs()
* DRMAA/drmaa_control()
* DRMAA/drmaa_synchronize()
* DRMAA/drmaa_wait()
* DRMAA/drmaa_wifexited()
* DRMAA/drmaa_wexitstatus()
* DRMAA/drmaa_wifsignaled()
* DRMAA/drmaa_wtermsig()
* DRMAA/drmaa_wcoredump()
* DRMAA/drmaa_wifaborted()
* DRMAA/drmaa_job_ps()
* DRMAA/drmaa_strerror()
* DRMAA/drmaa_get_contact()
* DRMAA/drmaa_version()
* DRMAA/drmaa_get_DRM_system()
*******************************************************************************/
/* ------------------- Constants ------------------- */
/*
* some not yet agreed buffer length constants
* these are recommended minimum values
*/
/* drmaa_get_attribute() */
public static final long DRMAA_ATTR_BUFFER = 1024;
public static final NativeLong DRMAA_ATTR_BUFFER_LEN = new NativeLong(DRMAA_ATTR_BUFFER - 1);
/* drmaa_get_contact() */
public static final long DRMAA_CONTACT_BUFFER = 1024;
public static final NativeLong DRMAA_CONTACT_BUFFER_LEN = new NativeLong(DRMAA_CONTACT_BUFFER - 1);
/* drmaa_get_DRM_system() */
public static final long DRMAA_DRM_SYSTEM_BUFFER = 1024;
public static final NativeLong DRMAA_DRM_SYSTEM_BUFFER_LEN = new NativeLong(DRMAA_DRM_SYSTEM_BUFFER - 1);
/* drmaa_get_DRM_system() */
public static final long DRMAA_DRMAA_IMPLEMENTATION_BUFFER = 1024;
public static final NativeLong DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN = new NativeLong(DRMAA_DRMAA_IMPLEMENTATION_BUFFER - 1);
/*
* Agreed buffer length constants
* these are recommended minimum values
*/
public static final long DRMAA_ERROR_STRING_BUFFER = 1024;
public static final long DRMAA_JOBNAME_BUFFER = 1024;
public static final long DRMAA_SIGNAL_BUFFER = 32;
public static final NativeLong DRMAA_ERROR_STRING_BUFFER_LEN = new NativeLong(DRMAA_ERROR_STRING_BUFFER - 1);
public static final NativeLong DRMAA_JOBNAME_BUFFER_LEN = new NativeLong(DRMAA_JOBNAME_BUFFER - 1);
public static final NativeLong DRMAA_SIGNAL_BUFFER_LEN = new NativeLong(DRMAA_SIGNAL_BUFFER - 1);
/*
* Agreed constants
*/
public static final NativeLong DRMAA_TIMEOUT_WAIT_FOREVER = new NativeLong(-1);
public static final NativeLong DRMAA_TIMEOUT_NO_WAIT = new NativeLong(0);
public static final String DRMAA_JOB_IDS_SESSION_ANY = "DRMAA_JOB_IDS_SESSION_ANY";
public static final String DRMAA_JOB_IDS_SESSION_ALL = "DRMAA_JOB_IDS_SESSION_ALL";
public static final String DRMAA_SUBMISSION_STATE_ACTIVE = "drmaa_active";
public static final String DRMAA_SUBMISSION_STATE_HOLD = "drmaa_hold";
/*
* Agreed placeholder names
*/
public static final String DRMAA_PLACEHOLDER_INCR = "$drmaa_incr_ph$";
public static final String DRMAA_PLACEHOLDER_HD = "$drmaa_hd_ph$";
public static final String DRMAA_PLACEHOLDER_WD = "$drmaa_wd_ph$";
/*
* Agreed names of job template attributes
*/
public static final String DRMAA_REMOTE_COMMAND = "drmaa_remote_command";
public static final String DRMAA_JS_STATE = "drmaa_js_state";
public static final String DRMAA_WD = "drmaa_wd";
public static final String DRMAA_JOB_CATEGORY = "drmaa_job_category";
public static final String DRMAA_NATIVE_SPECIFICATION = "drmaa_native_specification";
public static final String DRMAA_BLOCK_EMAIL = "drmaa_block_email";
public static final String DRMAA_START_TIME = "drmaa_start_time";
public static final String DRMAA_JOB_NAME = "drmaa_job_name";
public static final String DRMAA_INPUT_PATH = "drmaa_input_path";
public static final String DRMAA_OUTPUT_PATH = "drmaa_output_path";
public static final String DRMAA_ERROR_PATH = "drmaa_error_path";
public static final String DRMAA_JOIN_FILES = "drmaa_join_files";
public static final String DRMAA_TRANSFER_FILES = "drmaa_transfer_files";
public static final String DRMAA_DEADLINE_TIME = "drmaa_deadline_time";
public static final String DRMAA_WCT_HLIMIT = "drmaa_wct_hlimit";
public static final String DRMAA_WCT_SLIMIT = "drmaa_wct_slimit";
public static final String DRMAA_DURATION_HLIMIT = "drmaa_duration_hlimit";
public static final String DRMAA_DURATION_SLIMIT = "drmaa_duration_slimit";
/* names of job template vector attributes */
public static final String DRMAA_V_ARGV = "drmaa_v_argv";
public static final String DRMAA_V_ENV = "drmaa_v_env";
public static final String DRMAA_V_EMAIL = "drmaa_v_email";
/*
* DRMAA errno values
*
* do not touch these values are agreed !!!
*/
/**
 * DRMAA errno values returned by every C binding call.
 *
 * <p>These integer codes are fixed by the DRMAA 1.0 specification — do not
 * renumber them. Callers translate them into the org.ggf.drmaa exception
 * hierarchy.</p>
 */
public static interface DRMAA_ERRNO {
    /* -------------- these are relevant to all sections ---------------- */
    public static final int DRMAA_ERRNO_SUCCESS = 0; /* Routine returned normally with success. */
    public static final int DRMAA_ERRNO_INTERNAL_ERROR = 1; /* Unexpected or internal DRMAA error like memory allocation, system call failure, etc. */
    public static final int DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE = 2; /* Could not contact DRM system for this request. */
    public static final int DRMAA_ERRNO_AUTH_FAILURE = 3; /* The specified request is not processed successfully due to authorization failure. */
    public static final int DRMAA_ERRNO_INVALID_ARGUMENT = 4; /* The input value for an argument is invalid. */
    public static final int DRMAA_ERRNO_NO_ACTIVE_SESSION = 5; /* Exit routine failed because there is no active session */
    public static final int DRMAA_ERRNO_NO_MEMORY = 6; /* failed allocating memory */
    /* -------------- init and exit specific --------------- */
    public static final int DRMAA_ERRNO_INVALID_CONTACT_STRING = 7; /* Initialization failed due to invalid contact string. */
    public static final int DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR = 8; /* DRMAA could not use the default contact string to connect to DRM system. */
    public static final int DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED = 9; /* No default contact string was provided or selected. DRMAA requires that the default contact string is selected when there is more than one default contact string due to multiple DRMAA implementation contained in the binary module. */
    public static final int DRMAA_ERRNO_DRMS_INIT_FAILED = 10; /* Initialization failed due to failure to init DRM system. */
    public static final int DRMAA_ERRNO_ALREADY_ACTIVE_SESSION = 11; /* Initialization failed due to existing DRMAA session. */
    public static final int DRMAA_ERRNO_DRMS_EXIT_ERROR = 12; /* DRM system disengagement failed. */
    /* ---------------- job attributes specific -------------- */
    public static final int DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT = 13; /* The format for the job attribute value is invalid. */
    public static final int DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE = 14; /* The value for the job attribute is invalid. */
    public static final int DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES = 15; /* The value of this attribute is conflicting with a previously set attributes. */
    /* --------------------- job submission specific -------------- */
    public static final int DRMAA_ERRNO_TRY_LATER = 16; /* Could not pass job now to DRM system. A retry may succeed however (saturation). */
    public static final int DRMAA_ERRNO_DENIED_BY_DRM = 17; /* The DRM system rejected the job. The job will never be accepted due to DRM configuration or job template settings. */
    /* ------------------------------- job control specific ---------------- */
    public static final int DRMAA_ERRNO_INVALID_JOB = 18; /* The job specified by the 'jobid' does not exist. */
    public static final int DRMAA_ERRNO_RESUME_INCONSISTENT_STATE = 19; /* The job has not been suspended. The RESUME request will not be processed. */
    public static final int DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE = 20; /* The job has not been running, and it cannot be suspended. */
    public static final int DRMAA_ERRNO_HOLD_INCONSISTENT_STATE = 21; /* The job cannot be moved to a HOLD state. */
    public static final int DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE = 22; /* The job is not in a HOLD state. */
    public static final int DRMAA_ERRNO_EXIT_TIMEOUT = 23; /* We have encountered a time-out condition for drmaa_synchronize or drmaa_wait. */
    public static final int DRMAA_ERRNO_NO_RUSAGE = 24; /* This error code is returned by drmaa_wait() when a job has finished but no rusage and stat data could be provided. */
    public static final int DRMAA_ERRNO_NO_MORE_ELEMENTS = 25; /* There are no more elements in the opaque string vector. */
    public static final int DRMAA_NO_ERRNO = 26;
}
/*
* Agreed DRMAA job states as returned by drmaa_job_ps()
*/
/**
 * DRMAA job states as returned by drmaa_job_ps().
 *
 * <p>Values are fixed by the DRMAA 1.0 specification. The hex encoding
 * groups related states: 0x1x queued, 0x2x running/suspended, 0x30 done,
 * 0x40 failed.</p>
 */
public static interface DRMAA_PS {
    public static final int DRMAA_PS_UNDETERMINED = 0x00; /* process status cannot be determined */
    public static final int DRMAA_PS_QUEUED_ACTIVE = 0x10; /* job is queued and active */
    public static final int DRMAA_PS_SYSTEM_ON_HOLD = 0x11; /* job is queued and in system hold */
    public static final int DRMAA_PS_USER_ON_HOLD = 0x12; /* job is queued and in user hold */
    public static final int DRMAA_PS_USER_SYSTEM_ON_HOLD = 0x13; /* job is queued and in user and system hold */
    public static final int DRMAA_PS_RUNNING = 0x20; /* job is running */
    public static final int DRMAA_PS_SYSTEM_SUSPENDED = 0x21; /* job is system suspended */
    public static final int DRMAA_PS_USER_SUSPENDED = 0x22; /* job is user suspended */
    public static final int DRMAA_PS_USER_SYSTEM_SUSPENDED = 0x23; /* job is user and system suspended */
    public static final int DRMAA_PS_DONE = 0x30; /* job finished normally */
    public static final int DRMAA_PS_FAILED = 0x40; /* job finished, but failed */
}
/*
* Agreed DRMAA actions for drmaa_control()
*/
/**
 * DRMAA actions accepted by drmaa_control().
 *
 * <p>Values are fixed by the DRMAA 1.0 specification.</p>
 */
public static interface DRMAA_CONTROL {
    public static final int DRMAA_CONTROL_SUSPEND = 0;
    public static final int DRMAA_CONTROL_RESUME = 1;
    public static final int DRMAA_CONTROL_HOLD = 2;
    public static final int DRMAA_CONTROL_RELEASE = 3;
    public static final int DRMAA_CONTROL_TERMINATE = 4;
}
/* ------------------- Data types ------------------- */
/*
* Agreed opaque DRMAA job template
* struct drmaa_job_template_s is in japiP.h
*/
//typedef struct drmaa_job_template_s drmaa_job_template_t;
/* ---------- C/C++ language binding specific interfaces -------- */
//typedef struct drmaa_attr_names_s drmaa_attr_names_t;
//typedef struct drmaa_attr_values_s drmaa_attr_values_t;
//typedef struct drmaa_job_ids_s drmaa_job_ids_t;
/*
* get next string attribute from iterator
*
* returns DRMAA_ERRNO_SUCCESS or DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE
* if no such exists
*/
public static native int drmaa_get_next_attr_name(/* drmaa_attr_names_t* */ Pointer values, Pointer value,
NativeLong value_len);
public static native int drmaa_get_next_attr_value(/* drmaa_attr_names_t* */ Pointer values, Pointer value,
NativeLong value_len);
public static native int drmaa_get_next_job_id(/* drmaa_job_ids_t* */ Pointer values, Pointer value,
NativeLong value_len);
/*
* get element count of opaque string vector
*
* Gives the number of elements in the opaque string vector. Useful for
* copying the contents into an array.
*/
public static native int drmaa_get_num_attr_names(/* drmaa_attr_names_t* */ Pointer values, IntByReference size);
public static native int drmaa_get_num_attr_values(/* drmaa_attr_values_t* */ Pointer values, IntByReference size);
public static native int drmaa_get_num_job_ids(/* drmaa_job_ids_t* */ Pointer values, IntByReference size);
/*
* release opaque string vector
*
* Opaque string vectors can be used without any constraint
* until the release function has been called.
*/
public static native void drmaa_release_attr_names(/* drmaa_attr_names_t* */ Pointer values);
public static native void drmaa_release_attr_values(/* drmaa_attr_values_t* */ Pointer values);
public static native void drmaa_release_job_ids(/* drmaa_job_ids_t* */ Pointer values);
/* ------------------- init/exit routines ------------------- */
/*
* Initialize DRMAA API library and create a new DRMAA Session. 'Contact'
* is an implementation dependent string which MAY be used to specify
* which DRM system to use. This routine MUST be called before any
* other DRMAA calls, except for drmaa_version().
* If 'contact' is NULL, the default DRM system SHALL be used provided there is
* only one DRMAA implementation in the provided binary module. When there is
* more than one DRMAA implementation in the binary module, drmaa_init() SHALL
* return the DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED error. drmaa_init()
* SHOULD be called by only one of the threads. The main thread is RECOMMENDED.
* A call by another thread SHALL return DRMAA_ERRNO_ALREADY_ACTIVE_SESSION.
* When 'contact' is a semi-colon separated list of name=value strings, the
* strings will be parsed and interpreted. The current list of accepted names
* is:
* session -- the id of the session to which to reconnect
#if 0
* sge_root -- the SGE_ROOT to use
* sge_cell -- the SGE_CELL to use
#endif
*
* drmaa_init() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
* DRMAA_ERRNO_INVALID_CONTACT_STRING,
* DRMAA_ERRNO_NO_MEMORY,
* DRMAA_ERRNO_ALREADY_ACTIVE_SESSION,
* DRMAA_ERRNO_NO_DEFAULT_CONTACT_STRING_SELECTED, or
* DRMAA_ERRNO_DEFAULT_CONTACT_STRING_ERROR.
*/
public static native int drmaa_init(String contact, Pointer error_diagnosis, NativeLong error_diag_len);
/*
* Disengage from DRMAA library and allow the DRMAA library to perform
* any necessary internal clean up.
* This routine SHALL end the current DRMAA Session, but SHALL NOT effect any
* jobs (e.g., queued and running jobs SHALL remain queued and running).
* drmaa_exit() SHOULD be called by only one of the threads. Other thread calls
* to drmaa_exit() MAY fail since there is no active session.
*
* drmaa_exit() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
* DRMAA_ERRNO_DRMS_EXIT_ERROR or
* DRMAA_ERRNO_NO_ACTIVE_SESSION.
*/
public static native int drmaa_exit(Pointer error_diagnosis, NativeLong error_diag_len);
/* ------------------- job template routines ------------------- */
/*
* Allocate a new job template.
*
* drmaa_allocate_job_template() SHALL return DRMAA_ERRNO_SUCCESS on success,
* otherwise:
* DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
* DRMAA_ERRNO_INTERNAL_ERROR or
* DRMAA_ERRNO_NO_MEMORY.
*/
public static native int drmaa_allocate_job_template(/* drmaa_job_template_t** */ PointerByReference jt, Pointer error_diagnosis, NativeLong error_diag_len);
/*
* Deallocate a job template. This routine has no effect on jobs.
*
* drmaa_delete_job_template() SHALL return DRMAA_ERRNO_SUCCESS on success,
* otherwise:
* DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
* DRMAA_ERRNO_INTERNAL_ERROR.
*/
public static native int drmaa_delete_job_template(/* drmaa_job_template_t* */ Pointer jt, Pointer error_diagnosis,
NativeLong error_diag_len);
/*
* Adds ('name', 'value') pair to list of attributes in job template 'jt'.
* Only non-vector attributes SHALL be passed.
*
* drmaa_set_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
* DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT,
* DRMAA_ERRNO_INVALID_ARGUMENT,
* DRMAA_ERRNO_NO_MEMORY,
* DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE or
* DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES.
*/
public static native int drmaa_set_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
String value, Pointer error_diagnosis,
NativeLong error_diag_len);
/*
* If 'name' is an existing non-vector attribute name in the job
* template 'jt', then the value of 'name' SHALL be returned; otherwise,
* NULL is returned.
*
* drmaa_get_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
* DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE.
*/
public static native int drmaa_get_attribute(/* drmaa_job_template_t* */ Pointer jt, String name, Pointer value,
NativeLong value_len, Pointer error_diagnosis,
NativeLong error_diag_len);
/* Adds ('name', 'values') pair to list of vector attributes in job template
* 'jt'. Only vector attributes SHALL be passed.
* A 'value' string vector containing n elements must be n+1 elements long, with
* the nth value, i.e. value[n], being set to NULL as a delimiter.
*
* drmaa_set_vector_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success,
* otherwise:
* DRMAA_ERRNO_INVALID_ATTRIBUTE_FORMAT,
* DRMAA_ERRNO_INVALID_ARGUMENT,
* DRMAA_ERRNO_NO_MEMORY,
* DRMAA_ERRNO_CONFLICTING_ATTRIBUTE_VALUES.
*/
public static native int drmaa_set_vector_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
Pointer value, Pointer error_diagnosis,
NativeLong error_diag_len);
/*
* If 'name' is an existing vector attribute name in the job template 'jt',
* then the values of 'name' are returned; otherwise, NULL is returned.
*
* drmaa_get_vector_attribute() SHALL return DRMAA_ERRNO_SUCCESS on success,
* otherwise:
* DRMAA_ERRNO_INVALID_ATTRIBUTE_VALUE.
*/
public static native int drmaa_get_vector_attribute(/* drmaa_job_template_t* */ Pointer jt, String name,
/* drmaa_attr_values_t ** */ PointerByReference values,
Pointer error_diagnosis, NativeLong error_diag_len);
/*
* SHALL return the set of supported attribute names whose associated
* value type is String. This set SHALL include supported DRMAA reserved
* attribute names and native attribute names.
*
* drmaa_get_attribute_names() SHALL return DRMAA_ERRNO_SUCCESS on success,
* otherwise:
* DRMAA_ERRNO_NO_MEMORY.
*/
public static native int drmaa_get_attribute_names(/* drmaa_attr_names_t ** */ PointerByReference values,
Pointer error_diagnosis, NativeLong error_diag_len);
/*
* SHALL return the set of supported attribute names whose associated
* value type is String Vector. This set SHALL include supported DRMAA reserved
* attribute names and native attribute names.
*
* drmaa_get_vector_attribute_names() SHALL return DRMAA_ERRNO_SUCCESS on
* success, otherwise:
* DRMAA_ERRNO_NO_MEMORY.
*/
public static native int drmaa_get_vector_attribute_names(/* drmaa_attr_names_t ** */ PointerByReference values,
Pointer error_diagnosis,
NativeLong error_diag_len);
/* ------------------- job submission routines ------------------- */
    /*
     * Submit a job with attributes defined in the job template 'jt'.
     * The job identifier 'job_id' is a printable, NULL terminated string,
     * identical to that returned by the underlying DRM system.
     *
     * JNA note: 'job_id' is a caller-allocated output buffer of capacity
     * 'job_id_len' into which the native library writes the identifier.
     *
     * drmaa_run_job() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_TRY_LATER,
     *    DRMAA_ERRNO_DENIED_BY_DRM,
     *    DRMAA_ERRNO_NO_MEMORY,
     *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
     *    DRMAA_ERRNO_AUTH_FAILURE.
     */
    public static native int drmaa_run_job(Pointer job_id, NativeLong job_id_len,
                                           /* drmaa_job_template_t * */ Pointer jt, Pointer error_diagnosis,
                                           NativeLong error_diag_len);
    /*
     * Submit a set of parametric jobs, dependent on the implied loop index, each
     * with attributes defined in the job template 'jt'.
     * The job identifiers 'job_ids' SHALL all be printable,
     * NULL terminated strings, identical to those returned by the underlying
     * DRM system. Nonnegative loop bounds SHALL NOT use file names
     * that start with minus sign like command line options.
     * DRMAA defines a special index placeholder, drmaa_incr_ph, (which has the
     * value "$incr_pl$") that is used to construct parametric job templates.
     * For example:
     *    //C++ string syntax used
     *    drmaa_set_attribute(pjt, "stderr", drmaa_incr_ph + ".err" );
     *
     * The loop index runs from 'start' to 'end' in steps of 'incr'.
     *
     * drmaa_run_bulk_jobs() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_TRY_LATER,
     *    DRMAA_ERRNO_DENIED_BY_DRM,
     *    DRMAA_ERRNO_NO_MEMORY,
     *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE or
     *    DRMAA_ERRNO_AUTH_FAILURE.
     */
    public static native int drmaa_run_bulk_jobs(/* drmaa_job_ids_t ** */ PointerByReference jobids,
                                                 /* drmaa_job_template_t * */ Pointer jt, int start, int end,
                                                 int incr, Pointer error_diagnosis, NativeLong error_diag_len);
/* ------------------- job control routines ------------------- */
    /*
     * Start, stop, restart, or kill the job identified by 'job_id'.
     * If 'job_id' is DRMAA_JOB_IDS_SESSION_ALL then this routine
     * acts on all jobs *submitted* during this DRMAA session.
     * The legal values for 'action' and their meanings SHALL be:
     *    DRMAA_CONTROL_SUSPEND:   stop the job,
     *    DRMAA_CONTROL_RESUME:    (re)start the job,
     *    DRMAA_CONTROL_HOLD:      put the job on-hold,
     *    DRMAA_CONTROL_RELEASE:   release the hold on the job, and
     *    DRMAA_CONTROL_TERMINATE: kill the job.
     *
     * This routine SHALL return once the action has been acknowledged by
     * the DRM system, but does not necessarily wait until the action
     * has been completed.
     *
     * drmaa_control() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
     *    DRMAA_ERRNO_AUTH_FAILURE,
     *    DRMAA_ERRNO_NO_MEMORY,
     *    DRMAA_ERRNO_RESUME_INCONSISTENT_STATE,
     *    DRMAA_ERRNO_SUSPEND_INCONSISTENT_STATE,
     *    DRMAA_ERRNO_HOLD_INCONSISTENT_STATE,
     *    DRMAA_ERRNO_RELEASE_INCONSISTENT_STATE or
     *    DRMAA_ERRNO_INVALID_JOB.
     */
    public static native int drmaa_control(String jobid, int action, Pointer error_diagnosis,
                                           NativeLong error_diag_len);
    /*
     * Wait until all jobs specified by 'job_ids' have finished
     * execution. If 'job_ids' is DRMAA_JOB_IDS_SESSION_ALL then this routine
     * waits for all jobs *submitted* during this DRMAA session. The timeout value
     * is used to specify the number of seconds to wait for the job to fail or finish
     * before returning if a result is not immediately available. The value
     * DRMAA_TIMEOUT_WAIT_FOREVER can be used to specify that routine should wait
     * indefinitely for a result. The value DRMAA_TIMEOUT_NO_WAIT can be used to
     * specify that the routine should return immediately if no result is available.
     * If the call exits before timeout, all the jobs have
     * been waited on or there was an interrupt.
     * If the invocation exits on timeout, the return code is
     * DRMAA_ERRNO_EXIT_TIMEOUT. The caller SHOULD check system time before and
     * after this call in order to check how much time has passed.
     *
     * The dispose parameter specifies how to treat reaping information:
     *    True=1  "fake reap", i.e. dispose of the rusage data
     *    False=0 do not reap
     *
     * A 'job_ids' string vector containing n elements must be n+1 elements long,
     * with the nth value, i.e. job_ids[n], being set to NULL as a delimiter.
     *
     * drmaa_synchronize() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
     *    DRMAA_ERRNO_AUTH_FAILURE,
     *    DRMAA_ERRNO_NO_MEMORY,
     *    DRMAA_ERRNO_EXIT_TIMEOUT or
     *    DRMAA_ERRNO_INVALID_JOB.
     */
    public static native int drmaa_synchronize(Pointer job_ids, NativeLong timeout, int dispose,
                                               Pointer error_diagnosis, NativeLong error_diag_len);
    /*
     * This routine SHALL wait for a job with job_id to fail or finish execution. If
     * the special string, DRMAA_JOB_IDS_SESSION_ANY is provided as the job_id,
     * this routine SHALL wait for any job from the session. This routine is modeled
     * on the wait3 POSIX routine. The timeout value is used to specify the number
     * of seconds to wait for the job to fail or finish before returning if a result is
     * not immediately available. The value DRMAA_TIMEOUT_WAIT_FOREVER can be
     * used to specify that routine should wait indefinitely for a result. The value
     * DRMAA_TIMEOUT_NO_WAIT can be used to specify that the routine should return
     * immediately if no result is available.
     * If the call exits before timeout, the job has been waited on
     * successfully or there was an interrupt.
     * If the invocation exits on timeout, the return code is
     * DRMAA_ERRNO_EXIT_TIMEOUT. The caller SHOULD check system time before and
     * after this call in order to check how much time has passed.
     * The routine reaps jobs on a successful call, so any subsequent calls
     * to drmaa_wait SHOULD fail returning an error DRMAA_ERRNO_INVALID_JOB meaning
     * that the job has been already reaped. This error is the same as if the job
     * was unknown. Failing due to an elapsed timeout has an effect that it is
     * possible to issue drmaa_wait multiple times for the same job_id. When
     * successful, the rusage information SHALL be provided as an array of strings,
     * where each string complies with the format <name>=<value>. The string portion
     * <value> contains the amount of resources consumed by the job and is opaque.
     * The 'stat' drmaa_wait parameter is used in the drmaa_w* functions for
     * providing more detailed information about job termination if available. An
     * analogous set of macros is defined in POSIX for analyzing the wait3(2) OUT
     * parameter 'stat'.
     *
     * JNA note: 'job_id_out' is a caller-allocated buffer of capacity
     * 'job_id_out_len' that receives the identifier of the reaped job.
     *
     * drmaa_wait() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
     *    DRMAA_ERRNO_AUTH_FAILURE,
     *    DRMAA_ERRNO_NO_RUSAGE,
     *    DRMAA_ERRNO_NO_MEMORY,
     *    DRMAA_ERRNO_EXIT_TIMEOUT or
     *    DRMAA_ERRNO_INVALID_JOB.
     */
    public static native int drmaa_wait(String job_id, Pointer job_id_out, NativeLong job_id_out_len,
                                        IntByReference stat, NativeLong timeout, /* drmaa_attr_values_t ** */ PointerByReference rusage,
                                        Pointer error_diagnosis, NativeLong error_diag_len);
    /*
     * Evaluates into 'exited' a non-zero value if stat was returned for a
     * job that terminated normally. A zero value can also indicate that
     * although the job has terminated normally an exit status is not available
     * or that it is not known whether the job terminated normally. In both
     * cases drmaa_wexitstatus() SHALL NOT provide exit status information.
     * A non-zero 'exited' value indicates more detailed diagnosis can be provided
     * by means of drmaa_wifsignaled(), drmaa_wtermsig() and drmaa_wcoredump().
     */
    public static native int drmaa_wifexited(IntByReference exited, int stat, Pointer error_diagnosis,
                                             NativeLong error_diag_len);
    /*
     * If the OUT parameter 'exited' of drmaa_wifexited() is non-zero,
     * this function evaluates into 'exit_status' the exit code that the
     * job passed to _exit() (see exit(2)) or exit(3C), or the value that
     * the child process returned from main.
     */
    public static native int drmaa_wexitstatus(IntByReference exit_status, int stat, Pointer error_diagnosis,
                                               NativeLong error_diag_len);
    /*
     * Evaluates into 'signaled' a non-zero value if status was returned
     * for a job that terminated due to the receipt of a signal. A zero value
     * can also indicate that although the job has terminated due to the receipt
     * of a signal the signal is not available or that it is not known whether
     * the job terminated due to the receipt of a signal. In both cases
     * drmaa_wtermsig() SHALL NOT provide signal information.
     */
    public static native int drmaa_wifsignaled(IntByReference signaled, int stat, Pointer error_diagnosis,
                                               NativeLong error_diag_len);
    /*
     * If the OUT parameter 'signaled' of drmaa_wifsignaled(stat) is
     * non-zero, this function evaluates into signal a string representation of the
     * signal that caused the termination of the job. For signals declared by POSIX,
     * the symbolic names SHALL be returned (e.g., SIGABRT, SIGALRM).
     * For signals not declared by POSIX, any other string MAY be returned.
     *
     * JNA note: 'signal' is a caller-allocated buffer of capacity 'signal_len'.
     */
    public static native int drmaa_wtermsig(Pointer signal, NativeLong signal_len, int stat,
                                            Pointer error_diagnosis, NativeLong error_diag_len);
    /*
     * If the OUT parameter 'signaled' of drmaa_wifsignaled(stat) is
     * non-zero, this function evaluates into 'core_dumped' a non-zero value
     * if a core image of the terminated job was created.
     */
    public static native int drmaa_wcoredump(IntByReference core_dumped, int stat, Pointer error_diagnosis,
                                             NativeLong error_diag_len);
    /*
     * Evaluates into 'aborted' a non-zero value if 'stat'
     * was returned for a job that ended before entering the running state.
     */
    public static native int drmaa_wifaborted(IntByReference aborted, int stat, Pointer error_diagnosis,
                                              NativeLong error_diag_len);
    /*
     * Get the program status of the job identified by 'job_id'.
     * The possible values returned in 'remote_ps' and their meanings SHALL be:
     *
     *    DRMAA_PS_UNDETERMINED          = 0x00: process status cannot be determined
     *    DRMAA_PS_QUEUED_ACTIVE         = 0x10: job is queued and active
     *    DRMAA_PS_SYSTEM_ON_HOLD        = 0x11: job is queued and in system hold
     *    DRMAA_PS_USER_ON_HOLD          = 0x12: job is queued and in user hold
     *    DRMAA_PS_USER_SYSTEM_ON_HOLD   = 0x13: job is queued and in user and system
     *                                           hold
     *    DRMAA_PS_RUNNING               = 0x20: job is running
     *    DRMAA_PS_SYSTEM_SUSPENDED      = 0x21: job is system suspended
     *    DRMAA_PS_USER_SUSPENDED        = 0x22: job is user suspended
     *    DRMAA_PS_USER_SYSTEM_SUSPENDED = 0x23: job is user and system suspended
     *    DRMAA_PS_DONE                  = 0x30: job finished normally
     *    DRMAA_PS_FAILED                = 0x40: job finished, but failed
     *
     * DRMAA SHOULD always get the status of job_id from DRM system, unless the
     * previous status has been DRMAA_PS_FAILED or DRMAA_PS_DONE and the status has
     * been successfully cached. Terminated jobs get DRMAA_PS_FAILED status.
     *
     * drmaa_job_ps() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_DRM_COMMUNICATION_FAILURE,
     *    DRMAA_ERRNO_AUTH_FAILURE,
     *    DRMAA_ERRNO_NO_MEMORY or
     *    DRMAA_ERRNO_INVALID_JOB.
     */
    public static native int drmaa_job_ps(String job_id, IntByReference remote_ps, Pointer error_diagnosis,
                                          NativeLong error_diag_len);
/* ------------------- auxiliary routines ------------------- */
    /*
     * SHALL return the error message text associated with the errno number. The
     * routine SHALL return a null string if called with an invalid ERRNO number.
     */
    public static native String drmaa_strerror(int drmaa_errno);
    /*
     * If called before drmaa_init(), it SHALL return a comma delimited default
     * DRMAA implementation contacts string, one per each DRM system provided
     * implementation. If called after drmaa_init(), it SHALL return the selected
     * contact string. The output string is implementation dependent.
     *
     * JNA note: 'contact' is a caller-allocated buffer of capacity 'contact_len'.
     *
     * drmaa_get_contact() SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_INTERNAL_ERROR.
     */
    public static native int drmaa_get_contact(Pointer contact, NativeLong contact_len,
                                               Pointer error_diagnosis, NativeLong error_diag_len);
    /*
     * OUT major - major version number (non-negative integer)
     * OUT minor - minor version number (non-negative integer)
     * SHALL return the major and minor version numbers of the DRMAA library;
     * for DRMAA 1.0, 'major' is 1 and 'minor' is 0.
     */
    public static native int drmaa_version(IntByReference major, IntByReference minor,
                                           Pointer error_diagnosis, NativeLong error_diag_len);
    /*
     * If called before drmaa_init(), it SHALL return a comma delimited DRM systems
     * string, one per each DRM system provided implementation. If called after
     * drmaa_init(), it SHALL return the selected DRM system. The output string is
     * implementation dependent.
     *
     * JNA note: 'drm_system' is a caller-allocated buffer of capacity
     * 'drm_system_len'.
     *
     * drmaa_get_DRM_system() SHALL return DRMAA_ERRNO_SUCCESS on success,
     * otherwise:
     *    DRMAA_ERRNO_INTERNAL_ERROR.
     */
    public static native int drmaa_get_DRM_system(Pointer drm_system, NativeLong drm_system_len,
                                                  Pointer error_diagnosis, NativeLong error_diag_len);
    /*
     * If called before drmaa_init(), it SHALL return a comma delimited DRMAA
     * implementations string, one per each DRM system provided implementation. If
     * called after drmaa_init(), it SHALL return the selected DRMAA implementation.
     * The output (string) is implementation dependent.
     *
     * JNA note: 'drmaa_impl' is a caller-allocated buffer of capacity
     * 'drmaa_impl_len'.
     *
     * drmaa_get_DRMAA_implementation
     * routine SHALL return DRMAA_ERRNO_SUCCESS on success, otherwise:
     *    DRMAA_ERRNO_INTERNAL_ERROR.
     */
    public static native int drmaa_get_DRMAA_implementation(Pointer drmaa_impl, NativeLong drmaa_impl_len,
                                                            Pointer error_diagnosis, NativeLong error_diag_len);
}

View File

@ -91,6 +91,54 @@ public class LibBat {
Native.register("bat");
}
// Via support@platform.com:
// For equivalent api of bsub -a "xxx aaa qqq", option -a is not in struct submit, we
// have to use setOption_ to set it. setOption_ can be used in user program by including
// cmd.h or opensource.h of LSF opensource. You can refer to cmd.sub.c in opensource.
//
// Here is a demonstration on the api for bsub -a
// =========================================================================
// /*define external setOption_ function*/
// extern int setOption_(int argc, char **argv, char *template,
// struct submit *req, int mask, int mask2, char **errMsg);
//
// int setEsub(char *esub, struct submit *req) {
// int x;
// char *template, *arg[3];
// /*set esub with the following strings and set array length*/
// arg[0] = "blah";
// arg[1] = "-a";
// arg[2] = test;
// /* -a "test", You can add additional esubs in here. Just make sure they're space delimited. ie. "test mpich lammpi" */
// x=3;
// /*set template*/
// template = "a:"
// /*run setOption_()*/
// if (setOption_(x, arg, template, req, ~0, ~0, ~0, NULL) == -1) {
// return(-1);
// }
// else {
// return(0);
// }
// }
// =========================================================================
/**
* Used for setting esub and other options not in struct submit.
* Via support@platform.com
*
* @param argc number of args
* @param argv arguments including a first argument that will not be used
* @param template a colon delimited list of arguments in getopt format
* @param jobSubReq the lsf submit
* @param mask unknown
* @param mask2 unknown
* @param mask3 unknown
* @param errMsg unknown
* @return -1 if the option setting failed
*/
public static native int setOption_(int argc, Pointer argv, String template, submit jobSubReq, int mask, int mask2, int mask3, Pointer errMsg);
/** Max job name length as defined by 'man bsub'. */
public static final int MAX_JOB_NAME_LEN = 4094;
@ -9690,8 +9738,10 @@ public class LibBat {
* for a service class.
*/
public enum objectives {
GOAL_DEADLINE, GOAL_VELOCITY, GOAL_THROUGHPUT
public static interface objectives {
public static int GOAL_DEADLINE = 0;
public static int GOAL_VELOCITY = 1;
public static int GOAL_THROUGHPUT = 2;
}
@ -15109,52 +15159,46 @@ public static class ByValue extends jobArrayElementLog implements Structure.ByVa
* \addtogroup _consumertype _consumertype
* consumer types
*/
public static enum consumerType {
public static interface consumerType {
/**
* < Queues
*/
LIMIT_QUEUES(1),
public static final int LIMIT_QUEUES = 1;
/**
* < Per-queue
*/
LIMIT_PER_QUEUE(2),
public static final int LIMIT_PER_QUEUE = 2;
/**
* < Users
*/
LIMIT_USERS(3),
public static final int LIMIT_USERS = 3;
/**
* < Per-users
*/
LIMIT_PER_USER(4),
public static final int LIMIT_PER_USER = 4;
/**
* < Hosts
*/
LIMIT_HOSTS(5),
public static final int LIMIT_HOSTS = 5;
/**
* < Per-host
*/
LIMIT_PER_HOST(6),
public static final int LIMIT_PER_HOST = 6;
/**
* < Projects
*/
LIMIT_PROJECTS(7),
public static final int LIMIT_PROJECTS = 7;
/**
* < Per-project
*/
LIMIT_PER_PROJECT(8);
private int value;
private consumerType(int value) {
this.value = value;
}
public static final int LIMIT_PER_PROJECT = 8;
}
@ -19011,20 +19055,27 @@ public static class ByValue extends jobArrayElementLog implements Structure.ByVa
/* [] mis-matched in RMS[] */
public static final int RMS_BRACKETS_MISMATCH_ERR = (-22);
public static enum rmsAllocType_t {
RMS_ALLOC_TYPE_UNKNOWN, RMS_ALLOC_TYPE_SLOAD, RMS_ALLOC_TYPE_SNODE, RMS_ALLOC_TYPE_MCONT
public static interface rmsAllocType_t {
public static final int RMS_ALLOC_TYPE_UNKNOWN = 0;
public static final int RMS_ALLOC_TYPE_SLOAD = 1;
public static final int RMS_ALLOC_TYPE_SNODE = 2;
public static final int RMS_ALLOC_TYPE_MCONT = 3;
}
public static enum rmsTopology_t {
RMS_TOPOLOGY_UNKNOWN, RMS_TOPOLOGY_PTILE, RMS_TOPOLOGY_NODES
public static interface rmsTopology_t {
public static final int RMS_TOPOLOGY_UNKNOWN = 0;
public static final int RMS_TOPOLOGY_PTILE = 1;
public static final int RMS_TOPOLOGY_NODES = 2;
}
public static enum rmsFlags_t {
RMS_FLAGS_UNKNOWN, RMS_FLAGS_RAILS, RMS_FLAGS_RAILMASK
public static interface rmsFlags_t {
public static final int RMS_FLAGS_UNKNOWN = 0;
public static final int RMS_FLAGS_RAILS = 1;
public static final int RMS_FLAGS_RAILMASK = 2;
}

View File

@ -495,14 +495,19 @@ public class LibLsf {
public enum valueType {
LS_BOOLEAN, LS_NUMERIC, LS_STRING, LS_EXTERNAL
public static interface valueType {
public static final int LS_BOOLEAN = 0;
public static final int LS_NUMERIC = 1;
public static final int LS_STRING = 2;
public static final int LS_EXTERNAL = 3;
}
public enum orderType {
INCR, DECR, NA
public static interface orderType {
public static final int INCR = 0;
public static final int DECR = 1;
public static final int NA = 2;
}
@ -1567,8 +1572,13 @@ public class LibLsf {
public static final int NIO_TASK_ALL = 0x03;
public static final int NIO_TASK_CONNECTED = 0x04;
public static enum nioType {
NIO_STATUS, NIO_STDOUT, NIO_EOF, NIO_IOERR, NIO_REQUEUE, NIO_STDERR
public static interface nioType {
public static final int NIO_STATUS = 0;
public static final int NIO_STDOUT = 1;
public static final int NIO_EOF = 2;
public static final int NIO_IOERR = 3;
public static final int NIO_REQUEUE = 4;
public static final int NIO_STDERR = 5;
}

View File

@ -144,6 +144,9 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
} else if ("input_file".equals(argumentDefinition.fullName) && argumentDefinition.ioType == ArgumentIOType.INPUT) {
return Arrays.asList(new InputTaggedFileDefinitionField(argumentDefinition), new InputIndexesArgumentField(argumentDefinition, BAMIndex.BAMIndexSuffix, ".bam"));
} else if ((RodBinding.class.equals(argumentDefinition.argumentType) || RodBinding.class.equals(argumentDefinition.componentType)) && argumentDefinition.ioType == ArgumentIOType.INPUT) {
return Arrays.asList(new InputTaggedFileDefinitionField(argumentDefinition), new InputIndexesArgumentField(argumentDefinition, Tribble.STANDARD_INDEX_EXTENSION));
} else if (argumentDefinition.ioType == ArgumentIOType.INPUT) {
return Collections.singletonList(new InputArgumentField(argumentDefinition));
@ -196,7 +199,7 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
}
// if (intervalFields.contains(argumentDefinition.fullName) && argumentDefinition.ioType == ArgumentIOType.INPUT)
// Change intervals exclusize of intervalsString.
// Change intervals exclusive of intervalsString.
private static class IntervalFileArgumentField extends InputArgumentField {
public IntervalFileArgumentField(ArgumentDefinition argumentDefinition) {
super(argumentDefinition);
@ -332,9 +335,7 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
}
}
/**
* The other extreme of a NamedRodBindingField, allows the user to specify the track name, track type, and the file.
*/
// Allows the user to specify the track name, track type, and the file.
public static class RodBindArgumentField extends ArgumentDefinitionField {
public RodBindArgumentField(ArgumentDefinition argumentDefinition) {
super(argumentDefinition);
@ -347,25 +348,28 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
}
}
/**
* Named input_files.
*/
// Tagged input_files or other rods.
public static class InputTaggedFileDefinitionField extends ArgumentDefinitionField {
public InputTaggedFileDefinitionField(ArgumentDefinition argumentDefinition) {
super(argumentDefinition);
}
@Override protected Class<?> getInnerType() { return null; } // TaggedFile does not need to be imported.
@Override protected String getFieldType() { return "List[File]"; }
@Override protected String getDefaultValue() { return "Nil"; }
@Override protected String getFieldType() { return argumentDefinition.isMultiValued ? "List[File]" : "File"; }
@Override protected String getDefaultValue() { return argumentDefinition.isMultiValued ? "Nil" : "_"; }
@Override protected String getCommandLineTemplate() {
return " + repeat(\"\", %3$s, format=TaggedFile.formatCommandLine(\"%1$s\"))";
if (argumentDefinition.isMultiValued) {
return " + repeat(\"\", %3$s, format=TaggedFile.formatCommandLine(\"%1$s\"))";
} else if (!argumentDefinition.required) {
return " + optional(\"\", %3$s, format=TaggedFile.formatCommandLine(\"%1$s\"))";
} else {
return " + TaggedFile.formatCommandLine(\"%1$s\")(\"\", %3$s, \"\")";
}
}
}
/**
* Adds optional inputs for the indexes of any bams or sams added to this function.
*/
// Adds optional inputs for the indexes of any rods added to this function.
private static class InputIndexesArgumentField extends ArgumentField {
private final boolean originalIsMultiValued;
private final String indexFieldName;
private final String originalFieldName;
private final String indexSuffix;
@ -374,14 +378,19 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
this(originalArgumentDefinition, indexSuffix, null);
}
public InputIndexesArgumentField(ArgumentDefinition originalArgumentDefinition, String indexSuffix, String originalSuffix) {
this.indexFieldName = originalArgumentDefinition.fullName + "Indexes";
this.originalIsMultiValued = originalArgumentDefinition.isMultiValued;
this.indexFieldName = originalArgumentDefinition.fullName + "Index" + (originalIsMultiValued ? "es" : "");
this.originalFieldName = originalArgumentDefinition.fullName;
this.indexSuffix = indexSuffix;
this.originalSuffix = originalSuffix;
}
@Override protected Class<? extends Annotation> getAnnotationIOClass() { return Input.class; }
@Override public String getCommandLineAddition() { return ""; }
@Override protected String getDoc() { return "Dependencies on any indexes of " + this.originalFieldName; }
@Override protected String getDoc() {
return originalIsMultiValued
? "Dependencies on any indexes of " + this.originalFieldName
: "Dependencies on the index of " + this.originalFieldName;
}
@Override protected String getFullName() { return this.indexFieldName; }
@Override protected boolean isRequired() { return false; }
@Override protected String getFieldType() { return "List[File]"; }
@ -389,24 +398,41 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
@Override protected Class<?> getInnerType() { return File.class; }
@Override protected String getRawFieldName() { return this.indexFieldName; }
@Override protected String getFreezeFields() {
if (originalSuffix == null) {
return String.format(
("%1$s ++= %2$s" +
".filter(orig => orig != null)" +
".map(orig => new File(orig.getPath + \"%3$s\"))%n"),
indexFieldName, originalFieldName, indexSuffix);
if (originalIsMultiValued) {
if (originalSuffix == null) {
return String.format(
("%1$s ++= %2$s" +
".filter(orig => orig != null)" +
".map(orig => new File(orig.getPath + \"%3$s\"))%n"),
indexFieldName, originalFieldName, indexSuffix);
} else {
return String.format(
("%1$s ++= %2$s" +
".filter(orig => orig != null && orig.getName.endsWith(\"%4$s\"))" +
".flatMap(orig => Array(" +
" new File(orig.getPath + \"%3$s\")," +
" new File(orig.getPath.stripSuffix(\"%4$s\") + \"%3$s\") ))%n"),
indexFieldName, originalFieldName, indexSuffix, originalSuffix);
}
} else {
return String.format(
("%1$s ++= %2$s" +
".filter(orig => orig != null && orig.getName.endsWith(\"%4$s\"))" +
".flatMap(orig => Array(" +
" new File(orig.getPath + \"%3$s\")," +
" new File(orig.getPath.stripSuffix(\"%4$s\") + \"%3$s\") ))%n"),
indexFieldName, originalFieldName, indexSuffix, originalSuffix);
if (originalSuffix == null) {
return String.format(
("if (%2$s != null)%n " +
"%1$s :+= new File(%2$s.getPath + \"%3$s\")%n"),
indexFieldName, originalFieldName, indexSuffix);
} else {
return String.format(
("if (%2$s != null && %2$s.getName.endsWith(\"%4$s\"))%n " +
"%1$s ++= Array(" +
" new File(%2$s.getPath + \"%3$s\")," +
" new File(%2$s.getPath.stripSuffix(\"%4$s\") + \"%3$s\") )%n"),
indexFieldName, originalFieldName, indexSuffix, originalSuffix);
}
}
}
}
// Tracks an automatically generated index
private static abstract class OutputIndexArgumentField extends ArgumentField {
protected final String indexFieldName;
protected final String originalFieldName;
@ -456,6 +482,7 @@ public abstract class ArgumentDefinitionField extends ArgumentField {
}
}
// Allows setting the format for floats and doubles
private static class FormatterArgumentField extends ArgumentField {
private final ArgumentField argumentField;
public FormatterArgumentField(ArgumentField argumentField) {

View File

@ -154,9 +154,45 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
* @return a feature, (not guaranteed complete) that has the correct start and stop
*/
public Feature decodeLoc(String line) {
return reallyDecode(line);
String[] locParts = new String[6];
ParsingUtils.split(line, locParts, VCFConstants.FIELD_SEPARATOR_CHAR, true);
// get our alleles (because the end position depends on them)
String ref = getCachedString(locParts[3].toUpperCase());
String alts = getCachedString(locParts[4].toUpperCase());
List<Allele> alleles = parseAlleles(ref, alts, lineNo);
// find out our location
int start = Integer.valueOf(locParts[1]);
int stop = start;
// ref alleles don't need to be single bases for monomorphic sites
if ( alleles.size() == 1 ) {
stop = start + alleles.get(0).length() - 1;
} else if ( !isSingleNucleotideEvent(alleles) ) {
stop = clipAlleles(start, ref, alleles, null, lineNo);
}
return new VCFLocFeature(locParts[0], start, stop);
}
// Lightweight Tribble Feature carrying only the location (chromosome, start,
// stop) parsed by decodeLoc(); avoids building a full VariantContext when the
// caller only needs coordinates (e.g. for index construction).
private final static class VCFLocFeature implements Feature {
final String chr;
final int start, stop;
private VCFLocFeature(String chr, int start, int stop) {
this.chr = chr;
this.start = start;
this.stop = stop;
}
public String getChr() { return chr; }
public int getStart() { return start; }
public int getEnd() { return stop; }
}
/**
* decode the line into a feature (VariantContext)
* @param line the line
@ -207,7 +243,7 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
// parse out the required fields
String contig = getCachedString(parts[0]);
long pos = Long.valueOf(parts[1]);
int pos = Integer.valueOf(parts[1]);
String id = null;
if ( parts[2].length() == 0 )
generateException("The VCF specification requires a valid ID field");
@ -227,7 +263,7 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
Map<String, Object> attributes = parseInfo(info, id);
// find out our current location, and clip the alleles down to their minimum length
long loc = pos;
int loc = pos;
// ref alleles don't need to be single bases for monomorphic sites
if ( alleles.size() == 1 ) {
loc = pos + alleles.get(0).length() - 1;
@ -506,9 +542,9 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
* @param ref the reference string
* @param unclippedAlleles the list of unclipped alleles
* @param clippedAlleles output list of clipped alleles
* @return a list of alleles, clipped to the reference
* @return the new reference end position of this event
*/
protected static long clipAlleles(long position, String ref, List<Allele> unclippedAlleles, List<Allele> clippedAlleles, int lineNo) {
protected static int clipAlleles(int position, String ref, List<Allele> unclippedAlleles, List<Allele> clippedAlleles, int lineNo) {
// Note that the computation of forward clipping here is meant only to see whether there is a common
// base to all alleles, and to correctly compute reverse clipping,
@ -534,11 +570,13 @@ public abstract class AbstractVCFCodec implements FeatureCodec, NameAwareCodec,
if (clipping) reverseClipped++;
}
for (Allele a : unclippedAlleles) {
if (a.isSymbolic()) {
clippedAlleles.add(a);
} else {
clippedAlleles.add(Allele.create(Arrays.copyOfRange(a.getBases(),0,a.getBases().length-reverseClipped),a.isReference()));
if ( clippedAlleles != null ) {
for ( Allele a : unclippedAlleles ) {
if ( a.isSymbolic() ) {
clippedAlleles.add(a);
} else {
clippedAlleles.add(Allele.create(Arrays.copyOfRange(a.getBases(),0,a.getBases().length-reverseClipped),a.isReference()));
}
}
}

View File

@ -822,8 +822,11 @@ public class VariantContext implements Feature { // to enable tribble intergrati
// ---------------------------------------------------------------------------------------------------------
private void loadGenotypes() {
if ( !hasAttribute(UNPARSED_GENOTYPE_MAP_KEY) )
if ( !hasAttribute(UNPARSED_GENOTYPE_MAP_KEY) ) {
if ( genotypes == null )
genotypes = NO_GENOTYPES;
return;
}
Object parserObj = getAttribute(UNPARSED_GENOTYPE_PARSER_KEY);
if ( parserObj == null || !(parserObj instanceof VCFParser) )

View File

@ -0,0 +1,145 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import org.apache.commons.io.FileUtils;
import org.broadinstitute.sting.BaseTest;
import org.ggf.drmaa.*;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.File;
import java.util.*;
public class JnaSessionIntegrationTest extends BaseTest {
private static final SessionFactory factory = new JnaSessionFactory();
@Test
public void testDrmaa() throws Exception {
// Smoke test: obtain a session and print its version / contact / DRM
// identification strings; no assertions, just verifies the native
// bindings load and the informational calls do not throw.
Session session = factory.getSession();
Version version = session.getVersion();
System.out.println(String.format("DRMAA version: %d.%d", version.getMajor(), version.getMinor()));
System.out.println(String.format("DRMAA contact(s): %s", session.getContact()));
System.out.println(String.format("DRM system(s): %s", session.getDrmSystem()));
System.out.println(String.format("DRMAA implementation(s): %s", session.getDrmaaImplementation()));
}
@Test
public void testSubmitEcho() throws Exception {
// End-to-end job lifecycle test: submit a trivial shell job through the
// DRMAA session, poll until it leaves the queued/running states, then
// verify it exited cleanly and produced the expected output file.
File outFile = createNetworkTempFile("JnaSessionIntegrationTest-", ".out");
Session session = factory.getSession();
session.init(null);
try {
JobTemplate template = session.createJobTemplate();
template.setRemoteCommand("sh");
// Leading ':' marks the path as host-independent per the DRMAA spec.
template.setOutputPath(":" + outFile.getAbsolutePath());
template.setJoinFiles(true);
template.setArgs(Arrays.asList("-c", "echo \"Hello world.\""));
String jobId = session.runJob(template);
System.out.println(String.format("Job id %s", jobId));
session.deleteJobTemplate(template);
System.out.println("Waiting for job to run: " + jobId);
int remotePs = Session.QUEUED_ACTIVE;
List<Integer> runningStatuses = Arrays.asList(Session.QUEUED_ACTIVE, Session.RUNNING);
// Poll every 30s while the job is still queued or running.
while (runningStatuses.contains(remotePs)) {
Thread.sleep(30 * 1000L);
remotePs = session.getJobProgramStatus(jobId);
}
Assert.assertEquals(remotePs, Session.DONE, "Job status is not DONE.");
// TIMEOUT_NO_WAIT is safe here: the job is already DONE, so wait()
// should return its JobInfo immediately.
JobInfo jobInfo = session.wait(jobId, Session.TIMEOUT_NO_WAIT);
Assert.assertTrue(jobInfo.hasExited(), String.format("Job did not exit cleanly: %s", jobId));
Assert.assertEquals(jobInfo.getExitStatus(), 0, String.format("Exit status for jobId %s is non-zero", jobId));
if (jobInfo.hasSignaled())
Assert.fail(String.format("JobId %s exited with signal %s and core dump flag %s", jobId, jobInfo.getTerminatingSignal(), jobInfo.hasCoreDump()));
Assert.assertFalse(jobInfo.wasAborted(), String.format("Job was aborted: %s", jobId));
} finally {
session.exit();
}
// Allow up to 120s for the output file to appear on the shared filesystem.
Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
System.out.println("--- output ---");
System.out.println(FileUtils.readFileToString(outFile));
System.out.println("--- output ---");
Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
System.out.println("Validating that we reached the end of the test without exit.");
}
@Test
public void testCollectionConversions() {
Collection<String> list = Arrays.asList("a=1", "foo=bar", "empty=");
Map<String, String> map = new LinkedHashMap<String, String>();
map.put("a", "1");
map.put("foo", "bar");
map.put("empty", "");
Assert.assertEquals(JnaSession.collectionToMap(list), map);
Assert.assertEquals(JnaSession.mapToCollection(map), list);
}
@Test
public void testLimitConversions() {
Assert.assertEquals(JnaSession.formatLimit(0), "0:00:00");
Assert.assertEquals(JnaSession.formatLimit(59), "0:00:59");
Assert.assertEquals(JnaSession.formatLimit(60), "0:01:00");
Assert.assertEquals(JnaSession.formatLimit(3540), "0:59:00");
Assert.assertEquals(JnaSession.formatLimit(3599), "0:59:59");
Assert.assertEquals(JnaSession.formatLimit(7200), "2:00:00");
Assert.assertEquals(JnaSession.formatLimit(7260), "2:01:00");
Assert.assertEquals(JnaSession.formatLimit(7261), "2:01:01");
Assert.assertEquals(JnaSession.parseLimit("0"), 0);
Assert.assertEquals(JnaSession.parseLimit("00"), 0);
Assert.assertEquals(JnaSession.parseLimit("0:00"), 0);
Assert.assertEquals(JnaSession.parseLimit("00:00"), 0);
Assert.assertEquals(JnaSession.parseLimit("0:00:00"), 0);
Assert.assertEquals(JnaSession.parseLimit("1"), 1);
Assert.assertEquals(JnaSession.parseLimit("01"), 1);
Assert.assertEquals(JnaSession.parseLimit("0:01"), 1);
Assert.assertEquals(JnaSession.parseLimit("00:01"), 1);
Assert.assertEquals(JnaSession.parseLimit("0:00:01"), 1);
Assert.assertEquals(JnaSession.parseLimit("10"), 10);
Assert.assertEquals(JnaSession.parseLimit("0:10"), 10);
Assert.assertEquals(JnaSession.parseLimit("00:10"), 10);
Assert.assertEquals(JnaSession.parseLimit("0:00:10"), 10);
Assert.assertEquals(JnaSession.parseLimit("1:0"), 60);
Assert.assertEquals(JnaSession.parseLimit("1:00"), 60);
Assert.assertEquals(JnaSession.parseLimit("01:00"), 60);
Assert.assertEquals(JnaSession.parseLimit("0:01:00"), 60);
Assert.assertEquals(JnaSession.parseLimit("1:00:00"), 3600);
Assert.assertEquals(JnaSession.parseLimit("1:02:03"), 3723);
}
}

View File

@ -0,0 +1,236 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.jna.drmaa.v1_0;
import com.sun.jna.Memory;
import com.sun.jna.NativeLong;
import com.sun.jna.Pointer;
import com.sun.jna.StringArray;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
import org.apache.commons.io.FileUtils;
import org.broadinstitute.sting.BaseTest;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.File;
import java.util.Arrays;
import java.util.List;
public class LibDrmaaIntegrationTest extends BaseTest {
    /**
     * Smoke test for the raw JNA binding: queries and prints the DRMAA
     * version, contact(s), DRM system(s), and DRMAA implementation(s).
     * Fails if any of the native calls returns a non-success errno.
     */
    @Test
    public void testDrmaa() throws Exception {
        // Native buffer that receives the error string from each DRMAA call.
        Memory error = new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
        int errnum;

        IntByReference major = new IntByReference();
        IntByReference minor = new IntByReference();
        Memory contact = new Memory(LibDrmaa.DRMAA_CONTACT_BUFFER);
        Memory drmSystem = new Memory(LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER);
        Memory drmaaImplementation = new Memory(LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER);

        errnum = LibDrmaa.drmaa_version(major, minor, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
            Assert.fail(String.format("Could not get version from the DRMAA library: %s", error.getString(0)));
        System.out.println(String.format("DRMAA version: %d.%d", major.getValue(), minor.getValue()));

        errnum = LibDrmaa.drmaa_get_contact(contact, LibDrmaa.DRMAA_CONTACT_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
            Assert.fail(String.format("Could not get contacts from the DRMAA library: %s", error.getString(0)));
        System.out.println(String.format("DRMAA contact(s): %s", contact.getString(0)));

        errnum = LibDrmaa.drmaa_get_DRM_system(drmSystem, LibDrmaa.DRMAA_DRM_SYSTEM_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
            Assert.fail(String.format("Could not get DRM system from the DRMAA library: %s", error.getString(0)));
        System.out.println(String.format("DRM system(s): %s", drmSystem.getString(0)));

        errnum = LibDrmaa.drmaa_get_DRMAA_implementation(drmaaImplementation, LibDrmaa.DRMAA_DRMAA_IMPLEMENTATION_BUFFER_LEN, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
            Assert.fail(String.format("Could not get DRMAA implementation from the DRMAA library: %s", error.getString(0)));
        System.out.println(String.format("DRMAA implementation(s): %s", drmaaImplementation.getString(0)));
    }

    /**
     * End-to-end test against the raw JNA binding: builds a job template for a
     * small "echo" shell job, submits it, polls until it leaves the
     * queued/running states, then verifies via drmaa_wait / drmaa_wif* that it
     * exited cleanly with status zero, was not signaled, and was not aborted,
     * and finally that the expected output file appeared.
     */
    @Test
    public void testSubmitEcho() throws Exception {
        // Native buffer that receives the error string from each DRMAA call.
        Memory error = new Memory(LibDrmaa.DRMAA_ERROR_STRING_BUFFER);
        int errnum;

        File outFile = createNetworkTempFile("LibDrmaaIntegrationTest-", ".out");

        errnum = LibDrmaa.drmaa_init(null, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
        if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
            Assert.fail(String.format("Could not initialize the DRMAA library: %s", error.getString(0)));

        try {
            PointerByReference jtRef = new PointerByReference();
            Pointer jt;
            Memory jobIdMem = new Memory(LibDrmaa.DRMAA_JOBNAME_BUFFER);
            String jobId;
            IntByReference remotePs = new IntByReference();
            IntByReference stat = new IntByReference();
            PointerByReference rusage = new PointerByReference();

            errnum = LibDrmaa.drmaa_allocate_job_template(jtRef, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Could not create job template: %s", error.getString(0)));
            jt = jtRef.getValue();

            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_REMOTE_COMMAND, "sh", error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_REMOTE_COMMAND, error.getString(0)));

            // Leading ":" means the path applies to all hosts.
            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_OUTPUT_PATH, ":" + outFile.getAbsolutePath(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_OUTPUT_PATH, error.getString(0)));

            errnum = LibDrmaa.drmaa_set_attribute(jt, LibDrmaa.DRMAA_JOIN_FILES, "y", error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_JOIN_FILES, error.getString(0)));

            StringArray args = new StringArray(new String[] { "-c", "echo \"Hello world.\"" });
            errnum = LibDrmaa.drmaa_set_vector_attribute(jt, LibDrmaa.DRMAA_V_ARGV, args, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                // Fixed copy-paste bug: this message previously reported DRMAA_REMOTE_COMMAND
                // although the failing call sets DRMAA_V_ARGV.
                Assert.fail(String.format("Could not set attribute \"%s\": %s", LibDrmaa.DRMAA_V_ARGV, error.getString(0)));

            errnum = LibDrmaa.drmaa_run_job(jobIdMem, LibDrmaa.DRMAA_JOBNAME_BUFFER_LEN, jt, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Could not submit job: %s", error.getString(0)));
            jobId = jobIdMem.getString(0);

            System.out.println(String.format("Job id %s", jobId));

            errnum = LibDrmaa.drmaa_delete_job_template(jt, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Could not delete job template: %s", error.getString(0)));

            System.out.println("Waiting for job to run: " + jobId);
            // Poll every 30 seconds until the job leaves the queued/running states.
            remotePs.setValue(LibDrmaa.DRMAA_PS.DRMAA_PS_QUEUED_ACTIVE);
            List<Integer> runningStatuses = Arrays.asList(
                    LibDrmaa.DRMAA_PS.DRMAA_PS_QUEUED_ACTIVE, LibDrmaa.DRMAA_PS.DRMAA_PS_RUNNING);
            while (runningStatuses.contains(remotePs.getValue())) {
                Thread.sleep(30 * 1000L);
                errnum = LibDrmaa.drmaa_job_ps(jobId, remotePs, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                    Assert.fail(String.format("Could not get status for jobId %s: %s", jobId, error.getString(0)));
            }

            Assert.assertEquals(remotePs.getValue(), LibDrmaa.DRMAA_PS.DRMAA_PS_DONE, "Job status is not DONE.");

            // Reap the finished job; TIMEOUT_NO_WAIT is fine because the job is already done.
            errnum = LibDrmaa.drmaa_wait(jobId, Pointer.NULL, new NativeLong(0), stat, LibDrmaa.DRMAA_TIMEOUT_NO_WAIT,
                    rusage, error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Wait failed for jobId %s: %s", jobId, error.getString(0)));

            // Decode the opaque wait status via the drmaa_wif* helpers.
            IntByReference exited = new IntByReference();
            IntByReference exitStatus = new IntByReference();
            IntByReference signaled = new IntByReference();
            Memory signal = new Memory(LibDrmaa.DRMAA_SIGNAL_BUFFER);
            IntByReference coreDumped = new IntByReference();
            IntByReference aborted = new IntByReference();

            errnum = LibDrmaa.drmaa_wifexited(exited, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Exit check failed for jobId %s: %s", jobId, error.getString(0)));
            Assert.assertTrue(exited.getValue() != 0, String.format("Job did not exit cleanly: %s", jobId));

            errnum = LibDrmaa.drmaa_wexitstatus(exitStatus, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Exit status failed for jobId %s: %s", jobId, error.getString(0)));
            Assert.assertEquals(exitStatus.getValue(), 0, String.format("Exit status for jobId %s is non-zero", jobId));

            errnum = LibDrmaa.drmaa_wifsignaled(signaled, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Signaled check failed for jobId %s: %s", jobId, error.getString(0)));
            if (signaled.getValue() != 0) {
                errnum = LibDrmaa.drmaa_wtermsig(signal, LibDrmaa.DRMAA_SIGNAL_BUFFER_LEN, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                    Assert.fail(String.format("Signal lookup failed for jobId %s: %s", jobId, error.getString(0)));
                errnum = LibDrmaa.drmaa_wcoredump(coreDumped, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                    Assert.fail(String.format("Core dump check failed for jobId %s: %s", jobId, error.getString(0)));
                Assert.fail(String.format("JobId %s exited with signal %s and core dump flag %d", jobId, signal.getString(0), coreDumped.getValue()));
            }

            errnum = LibDrmaa.drmaa_wifaborted(aborted, stat.getValue(), error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                Assert.fail(String.format("Aborted check failed for jobId %s: %s", jobId, error.getString(0)));
            Assert.assertTrue(aborted.getValue() == 0, String.format("Job was aborted: %s", jobId));
        } finally {
            // Always shut down the library; only check drmaa_exit's return code
            // when the body succeeded, so a shutdown error can't mask the
            // original failure.
            if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS) {
                LibDrmaa.drmaa_exit(error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
            } else {
                errnum = LibDrmaa.drmaa_exit(error, LibDrmaa.DRMAA_ERROR_STRING_BUFFER_LEN);
                if (errnum != LibDrmaa.DRMAA_ERRNO.DRMAA_ERRNO_SUCCESS)
                    Assert.fail(String.format("Could not shut down the DRMAA library: %s", error.getString(0)));
            }
        }

        Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
        System.out.println("--- output ---");
        System.out.println(FileUtils.readFileToString(outFile));
        System.out.println("--- output ---");
        Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
        System.out.println("Validating that we reached the end of the test without exit.");
    }
}

View File

@ -91,7 +91,7 @@ public class LibBatIntegrationTest extends BaseTest {
}
@Test
public void testSubmitEcho() throws InterruptedException {
public void testSubmitEcho() throws Exception {
String queue = "hour";
File outFile = createNetworkTempFile("LibBatIntegrationTest-", ".out");
@ -114,6 +114,10 @@ public class LibBatIntegrationTest extends BaseTest {
req.command = "echo \"Hello world.\"";
String[] argv = {"", "-a", "tv"};
int setOptionResult = LibBat.setOption_(argv.length, new StringArray(argv), "a:", req, ~0, ~0, ~0, null);
Assert.assertTrue(setOptionResult != -1, "setOption_ returned -1");
submitReply reply = new submitReply();
long jobId = LibBat.lsb_submit(req, reply);
@ -142,6 +146,9 @@ public class LibBatIntegrationTest extends BaseTest {
Assert.assertTrue(Utils.isFlagSet(jobStatus, LibBat.JOB_STAT_DONE), String.format("Unexpected job status: 0x%02x", jobStatus));
Assert.assertTrue(FileUtils.waitFor(outFile, 120), "File not found: " + outFile.getAbsolutePath());
System.out.println("--- output ---");
System.out.println(FileUtils.readFileToString(outFile));
System.out.println("--- output ---");
Assert.assertTrue(outFile.delete(), "Unable to delete " + outFile.getAbsolutePath());
Assert.assertEquals(reply.queue, req.queue, "LSF reply queue does not match requested queue.");
System.out.println("Validating that we reached the end of the test without exit.");

View File

@ -84,12 +84,6 @@ class DataProcessingPipeline extends QScript {
var nContigs: Int = 0 // Use the number of contigs for scatter gathering jobs
var cleanModelEnum: ConsensusDeterminationModel = ConsensusDeterminationModel.USE_READS
if (cleaningModel == "KNOWNS_ONLY") {
cleanModelEnum = ConsensusDeterminationModel.KNOWNS_ONLY
}
else if (cleaningModel == "USE_SW") {
cleanModelEnum = ConsensusDeterminationModel.USE_SW
}
@ -148,9 +142,9 @@ class DataProcessingPipeline extends QScript {
println (f)
println()
val sampleFileName = new File(qscript.outputDir + qscript.projectName + "." + sample + ".bam")
val sampleFileName = new File(qscript.outputDir + qscript.projectName + "." + sample + ".list")
sampleBamFiles(sample) = sampleFileName
add(joinBams(flist, sampleFileName))
add(writeList(flist, sampleFileName))
}
println("*** INPUT FILES ***\n\n")
@ -176,18 +170,20 @@ class DataProcessingPipeline extends QScript {
var realignedBams: List[File] = List()
var index = 1
for (bam <- bams) {
val readSortedBam = swapExt(bam, ".bam", "." + index + ".sorted.bam" )
// first revert the BAM file to the original qualities
val revertedBAM = revertBAM(bam)
val readSortedBam = swapExt(revertedBAM, ".bam", "." + index + ".sorted.bam" )
val saiFile1 = swapExt(bam, ".bam", "." + index + ".1.sai")
val saiFile2 = swapExt(bam, ".bam", "." + index + ".2.sai")
val realignedSamFile = swapExt(bam, ".bam", "." + index + ".realigned.sam")
val realignedBamFile = swapExt(bam, ".bam", "." + index + ".realigned.bam")
val rgRealignedBamFile = swapExt(bam, ".bam", "." + index + ".realigned.rg.bam")
if (useBWAse) {
add(bwa_aln_se(bam, saiFile1),
bwa_sam_se(bam, saiFile1, realignedSamFile))
add(bwa_aln_se(revertedBAM, saiFile1),
bwa_sam_se(revertedBAM, saiFile1, realignedSamFile))
}
else {
add(sortSam(bam, readSortedBam, SortOrder.queryname),
add(sortSam(revertedBAM, readSortedBam, SortOrder.queryname),
bwa_aln_pe(readSortedBam, saiFile1, 1),
bwa_aln_pe(readSortedBam, saiFile2, 2),
bwa_sam_pe(readSortedBam, saiFile1, saiFile2, realignedSamFile))
@ -200,6 +196,27 @@ class DataProcessingPipeline extends QScript {
return realignedBams
}
def getIndelCleaningModel(): ConsensusDeterminationModel = {
if (cleaningModel == "KNOWNS_ONLY")
ConsensusDeterminationModel.KNOWNS_ONLY
else if (cleaningModel == "USE_SW")
ConsensusDeterminationModel.USE_SW
else
ConsensusDeterminationModel.USE_READS
}
def revertBams(bams: List[File]): List[File] = {
var revertedBAMList: List[File] = List()
for (bam <- bams)
revertedBAMList :+= revertBAM(bam)
return revertedBAMList
}
def revertBAM(bam: File): File = {
val revertedBAM = swapExt(bam, ".bam", ".reverted.bam")
add(revert(bam, revertedBAM))
return revertedBAM
}
/****************************************************************************
* Main script
@ -208,21 +225,23 @@ class DataProcessingPipeline extends QScript {
def script = {
cleanModelEnum = getIndelCleaningModel()
// keep a record of the number of contigs in the first bam file in the list
val bams = QScriptUtils.createListFromFile(input)
nContigs = QScriptUtils.getNumberOfContigs(bams(0))
val realignedBams = if (useBWApe || useBWAse) {performAlignment(bams)} else {bams}
val realignedBAMs = if (useBWApe || useBWAse) {performAlignment(bams)} else {revertBams(bams)}
// Generate a BAM file per sample joining all per lane files if necessary
val sampleBamFiles: Map[String, File] = createSampleFiles(bams, realignedBams)
val sampleBAMFiles: Map[String, File] = createSampleFiles(bams, realignedBAMs)
// Final output list of processed bam files
var cohortList: List[File] = List()
// Simple progress report
println("\nFound the following samples: ")
for ((sample, file) <- sampleBamFiles)
for ((sample, file) <- sampleBAMFiles)
println("\t" + sample + " -> " + file)
println("\n")
@ -232,7 +251,8 @@ class DataProcessingPipeline extends QScript {
add(target(null, globalIntervals))
// Put each sample through the pipeline
for ((sample, bam) <- sampleBamFiles) {
for ((sample, sampleFile) <- sampleBAMFiles) {
val bam = if (sampleFile.endsWith(".list")) {swapExt(sampleFile, ".list", ".bam")} else {sampleFile}
// BAM files generated by the pipeline
val cleanedBam = swapExt(bam, ".bam", ".clean.bam")
@ -249,17 +269,18 @@ class DataProcessingPipeline extends QScript {
val preValidateLog = swapExt(bam, ".bam", ".pre.validation")
val postValidateLog = swapExt(bam, ".bam", ".post.validation")
// Validation is an optional step for the BAM file generated after
// alignment and the final bam file of the pipeline.
if (!noValidation) {
add(validate(bam, preValidateLog),
if (!noValidation && sampleFile.endsWith(".bam")) { // todo -- implement validation for .list BAM files
add(validate(sampleFile, preValidateLog),
validate(recalBam, postValidateLog))
}
if (cleaningModel != ConsensusDeterminationModel.KNOWNS_ONLY)
add(target(bam, targetIntervals))
add(target(sampleFile, targetIntervals))
add(clean(bam, targetIntervals, cleanedBam),
add(clean(sampleFile, targetIntervals, cleanedBam),
dedup(cleanedBam, dedupedBam, metricsFile),
cov(dedupedBam, preRecalFile),
recal(dedupedBam, preRecalFile, recalBam),
@ -300,27 +321,26 @@ class DataProcessingPipeline extends QScript {
}
case class target (inBams: File, outIntervals: File) extends RealignerTargetCreator with CommandLineGATKArgs {
if (cleaningModel != ConsensusDeterminationModel.KNOWNS_ONLY)
if (cleanModelEnum != ConsensusDeterminationModel.KNOWNS_ONLY)
this.input_file :+= inBams
this.out = outIntervals
this.mismatchFraction = 0.0
this.rodBind :+= RodBind("dbsnp", "VCF", dbSNP)
this.known :+= qscript.dbSNP
if (indels != null)
this.rodBind :+= RodBind("indels", "VCF", indels)
this.known :+= qscript.indels
this.scatterCount = nContigs
this.analysisName = queueLogDir + outIntervals + ".target"
this.jobName = queueLogDir + outIntervals + ".target"
}
case class clean (inBams: File, tIntervals: File, outBam: File) extends IndelRealigner with CommandLineGATKArgs {
@Output(doc="output bai file") var bai = swapExt(outBam, ".bam", ".bai")
this.input_file :+= inBams
this.targetIntervals = tIntervals
this.out = outBam
this.rodBind :+= RodBind("dbsnp", "VCF", dbSNP)
this.known :+= qscript.dbSNP
if (qscript.indels != null)
this.rodBind :+= RodBind("indels", "VCF", qscript.indels)
this.consensusDeterminationModel = consensusDeterminationModel
this.known :+= qscript.indels
this.consensusDeterminationModel = cleanModelEnum
this.compress = 0
this.scatterCount = nContigs
this.analysisName = queueLogDir + outBam + ".clean"
@ -328,7 +348,7 @@ class DataProcessingPipeline extends QScript {
}
case class cov (inBam: File, outRecalFile: File) extends CountCovariates with CommandLineGATKArgs {
this.rodBind :+= RodBind("dbsnp", "VCF", dbSNP)
this.knownSites :+= qscript.dbSNP
this.covariate ++= List("ReadGroupCovariate", "QualityScoreCovariate", "CycleCovariate", "DinucCovariate")
this.input_file :+= inBam
this.recal_file = outRecalFile
@ -368,16 +388,15 @@ class DataProcessingPipeline extends QScript {
}
case class dedup (inBam: File, outBam: File, metricsFile: File) extends MarkDuplicates with ExternalCommonArgs {
@Output(doc="output bai file") var bai = swapExt(outBam, ".bam", ".bai")
this.input = List(inBam)
this.input :+= inBam
this.output = outBam
this.metrics = metricsFile
this.memoryLimit = 16
this.analysisName = queueLogDir + outBam + ".dedup"
this.jobName = queueLogDir + outBam + ".dedup"
}
case class joinBams (inBams: List[File], outBam: File) extends MergeSamFiles with ExternalCommonArgs {
@Output(doc="output bai file") var bai = swapExt(outBam, ".bam", ".bai")
this.input = inBams
this.output = outBam
this.analysisName = queueLogDir + outBam + ".joinBams"
@ -385,8 +404,7 @@ class DataProcessingPipeline extends QScript {
}
case class sortSam (inSam: File, outBam: File, sortOrderP: SortOrder) extends SortSam with ExternalCommonArgs {
@Output(doc="output bai file") var bai = swapExt(outBam, ".bam", ".bai")
this.input = List(inSam)
this.input :+= inSam
this.output = outBam
this.sortOrder = sortOrderP
this.analysisName = queueLogDir + outBam + ".sortSam"
@ -394,7 +412,7 @@ class DataProcessingPipeline extends QScript {
}
case class validate (inBam: File, outLog: File) extends ValidateSamFile with ExternalCommonArgs {
this.input = List(inBam)
this.input :+= inBam
this.output = outLog
this.REFERENCE_SEQUENCE = qscript.reference
this.isIntermediate = false
@ -404,8 +422,7 @@ class DataProcessingPipeline extends QScript {
case class addReadGroup (inBam: File, outBam: File, readGroup: ReadGroup) extends AddOrReplaceReadGroups with ExternalCommonArgs {
@Output(doc="output bai file") var bai = swapExt(outBam, ".bam", ".bai")
this.input = List(inBam)
this.input :+= inBam
this.output = outBam
this.RGID = readGroup.id
this.RGCN = readGroup.cn
@ -418,6 +435,14 @@ class DataProcessingPipeline extends QScript {
this.jobName = queueLogDir + outBam + ".rg"
}
case class revert (inBam: File, outBam: File) extends RevertSam with ExternalCommonArgs {
this.output = outBam
this.input :+= inBam
this.analysisName = queueLogDir + outBam + "revert"
this.jobName = queueLogDir + outBam + ".revert"
}
case class bwa_aln_se (inBam: File, outSai: File) extends CommandLineFunction with ExternalCommonArgs {
@Input(doc="bam file to be aligned") var bam = inBam
@Output(doc="output sai file") var sai = outSai

View File

@ -5,17 +5,6 @@ import org.broadinstitute.sting.queue.extensions.gatk._
import org.broadinstitute.sting.queue.QScript
import org.broadinstitute.sting.gatk.phonehome.GATKRunReport
// ToDos:
// reduce the scope of the datasets so the script is more nimble
// create gold standard BAQ'd bam files, no reason to always do it on the fly
// Analysis to add at the end of the script:
// auto generation of the cluster plots
// spike in NA12878 to the exomes and to the lowpass, analysis of how much of her variants are being recovered compared to single sample exome or HiSeq calls
// produce Kiran's Venn plots based on comparison between new VCF and gold standard produced VCF
class MethodsDevelopmentCallingPipeline extends QScript {
qscript =>
@ -28,15 +17,12 @@ class MethodsDevelopmentCallingPipeline extends QScript {
@Argument(shortName="dataset", doc="selects the datasets to run. If not provided, all datasets will be used", required=false)
var datasets: List[String] = Nil
@Argument(shortName="skipGoldStandard", doc="doesn't run the pipeline with the goldstandard VCF files for comparison", required=false)
var skipGoldStandard: Boolean = false
@Argument(shortName="runGoldStandard", doc="run the pipeline with the goldstandard VCF files for comparison", required=false)
var runGoldStandard: Boolean = false
@Argument(shortName="noBAQ", doc="turns off BAQ calculation", required=false)
var noBAQ: Boolean = false
@Argument(shortName="eval", doc="adds the VariantEval walker to the pipeline", required=false)
var eval: Boolean = false
@Argument(shortName="indels", doc="calls indels with the Unified Genotyper", required=false)
var callIndels: Boolean = false
@ -52,8 +38,6 @@ class MethodsDevelopmentCallingPipeline extends QScript {
@Argument(shortName="sample", doc="Samples to include in Variant Eval", required=false)
var samples: List[String] = Nil
class Target(
val baseName: String,
val reference: File,
@ -65,7 +49,9 @@ class MethodsDevelopmentCallingPipeline extends QScript {
val intervals: String,
val titvTarget: Double,
val trancheTarget: Double,
val isLowpass: Boolean) {
val isLowpass: Boolean,
val isExome: Boolean,
val nSamples: Int) {
val name = qscript.outputDir + baseName
val clusterFile = new File(name + ".clusters")
val rawVCF = new File(name + ".raw.vcf")
@ -89,9 +75,8 @@ class MethodsDevelopmentCallingPipeline extends QScript {
val b36 = new File("/humgen/1kg/reference/human_b36_both.fasta")
val b37 = new File("/humgen/1kg/reference/human_g1k_v37.fasta")
val dbSNP_hg18_129 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/dbSNP/dbsnp_129_hg18.rod" // Special case for NA12878 collections that can't use 132 because they are part of it.
val dbSNP_b36 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/dbSNP/dbsnp_129_b36.rod"
val dbSNP_b37 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/dbSNP/dbsnp_132_b37.leftAligned.vcf"
val dbSNP_b37_129 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/dbSNP/dbsnp_129_b37.leftAligned.vcf" // Special case for NA12878 collections that can't use 132 because they are part of it.
val dbSNP_b36 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/dbSNP/dbsnp_132.b36.excluding_sites_after_129.vcf"
val dbSNP_b37 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/dbSNP/dbsnp_129_b37.leftAligned.vcf" // Special case for NA12878 collections that can't use 132 because they are part of it.
val hapmap_hg18 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/HapMap/3.3/sites_r27_nr.hg18_fwd.vcf"
val hapmap_b36 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/HapMap/3.3/sites_r27_nr.b36_fwd.vcf"
val hapmap_b37 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/HapMap/3.3/sites_r27_nr.b37_fwd.vcf"
@ -100,55 +85,61 @@ class MethodsDevelopmentCallingPipeline extends QScript {
val omni_b37 = "/humgen/gsa-hpprojects/GATK/data/Comparisons/Validated/Omni2.5_chip/Omni25_sites_1525_samples.b37.vcf"
val indelMask_b36 = "/humgen/1kg/processing/pipeline_test_bams/pilot1.dindel.mask.b36.bed"
val indelMask_b37 = "/humgen/1kg/processing/pipeline_test_bams/pilot1.dindel.mask.b37.bed"
val training_1000G = "/humgen/1kg/processing/official_release/phase1/projectConsensus/phase1.wgs.projectConsensus.v2b.recal.highQuality.vcf"
val badSites_1000G = "/humgen/1kg/processing/official_release/phase1/projectConsensus/phase1.wgs.projectConsensus.v2b.recal.terrible.vcf"
val projectConsensus_1000G = "/humgen/1kg/processing/official_release/phase1/projectConsensus/ALL.wgs.projectConsensus_v2b.20101123.snps.sites.vcf"
val lowPass: Boolean = true
val exome: Boolean = true
val indels: Boolean = true
val queueLogDir = ".qlog/"
// BUGBUG: We no longer support b36/hg18 because several of the necessary files aren't available aligned to those references
val targetDataSets: Map[String, Target] = Map(
"HiSeq" -> new Target("NA12878.HiSeq", hg18, dbSNP_hg18_129, hapmap_hg18,
"/humgen/gsa-hpprojects/dev/depristo/oneOffProjects/1000GenomesProcessingPaper/wgs.v13/HiSeq.WGS.cleaned.indels.10.mask",
new File("/humgen/gsa-hpprojects/NA12878Collection/bams/NA12878.HiSeq.WGS.bwa.cleaned.recal.bam"),
new File("/home/radon01/depristo/work/oneOffProjects/1000GenomesProcessingPaper/wgs.v13/HiSeq.WGS.cleaned.ug.snpfiltered.indelfiltered.vcf"),
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg18.intervals", 2.07, 99.0, !lowPass),
"HiSeq19" -> new Target("NA12878.HiSeq19", hg19, dbSNP_b37_129, hapmap_b37, indelMask_b37,
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg18.intervals", 2.14, 99.0, !lowPass, !exome, 1),
"HiSeq19" -> new Target("NA12878.HiSeq19", hg19, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/gsa-hpprojects/NA12878Collection/bams/NA12878.HiSeq.WGS.bwa.cleaned.recal.hg19.bam"),
new File("/humgen/gsa-hpprojects/dev/carneiro/hiseq19/analysis/snps/NA12878.HiSeq19.filtered.vcf"),
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg19.intervals", 2.3, 99.0, !lowPass),
"GA2hg19" -> new Target("NA12878.GA2.hg19", hg19, dbSNP_b37_129, hapmap_b37, indelMask_b37,
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.noChrY.hg19.intervals", 2.14, 99.0, !lowPass, !exome, 1),
"GA2hg19" -> new Target("NA12878.GA2.hg19", hg19, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/gsa-hpprojects/NA12878Collection/bams/NA12878.GA2.WGS.bwa.cleaned.hg19.bam"),
new File("/humgen/gsa-hpprojects/dev/carneiro/hiseq19/analysis/snps/NA12878.GA2.hg19.filtered.vcf"),
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg19.intervals", 2.3, 99.0, !lowPass),
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg19.intervals", 2.14, 99.0, !lowPass, !exome, 1),
"WEx" -> new Target("NA12878.WEx", hg18, dbSNP_hg18_129, hapmap_hg18,
"/humgen/gsa-hpprojects/dev/depristo/oneOffProjects/1000GenomesProcessingPaper/wgs.v13/GA2.WEx.cleaned.indels.10.mask",
new File("/humgen/gsa-hpprojects/NA12878Collection/bams/NA12878.WEx.cleaned.recal.bam"),
new File("/home/radon01/depristo/work/oneOffProjects/1000GenomesProcessingPaper/wgs.v13/GA2.WEx.cleaned.ug.snpfiltered.indelfiltered.vcf"),
"/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.targets.interval_list", 2.6, 97.0, !lowPass),
"WExTrio" -> new Target("CEUTrio.WEx", hg19, dbSNP_b37_129, hapmap_b37, indelMask_b37,
"/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.targets.interval_list", 3.3, 98.0, !lowPass, exome, 1),
"WExTrio" -> new Target("CEUTrio.WEx", hg19, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/gsa-hpprojects/NA12878Collection/bams/CEUTrio.HiSeq.WEx.bwa.cleaned.recal.bam"),
new File("/humgen/gsa-hpprojects/dev/carneiro/trio/analysis/snps/CEUTrio.WEx.filtered.vcf"),
"/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list", 2.6, 97.0, !lowPass),
"WGSTrio" -> new Target("CEUTrio.WGS", hg19, dbSNP_b37_129, hapmap_b37, indelMask_b37,
"/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list", 3.3, 98.0, !lowPass, exome, 3),
"WGSTrio" -> new Target("CEUTrio.WGS", hg19, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/gsa-hpprojects/NA12878Collection/bams/CEUTrio.HiSeq.WGS.bwa.cleaned.recal.bam"),
new File("/humgen/gsa-hpprojects/dev/carneiro/trio/analysis/snps/CEUTrio.WEx.filtered.vcf"), // ** THIS GOLD STANDARD NEEDS TO BE CORRECTED **
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg19.intervals", 2.3, 99.0, !lowPass),
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.hg19.intervals", 2.3, 99.0, !lowPass, !exome, 3),
"FIN" -> new Target("FIN", b37, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/1kg/processing/pipeline_test_bams/FIN.79sample.Nov2010.chr20.bam"),
new File("/humgen/gsa-hpprojects/dev/data/AugChr20Calls_v4_3state/ALL.august.v4.chr20.filtered.vcf"), // ** THIS GOLD STANDARD NEEDS TO BE CORRECTED **
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.chr20.hg19.intervals", 2.3, 99.0, lowPass),
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.chr20.hg19.intervals", 2.3, 99.0, lowPass, !exome, 79),
"TGPWExGdA" -> new Target("1000G.WEx.GdA", b37, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/1kg/processing/pipeline_test_bams/Barcoded_1000G_WEx_Reduced_Plate_1.cleaned.list"), // BUGBUG: reduce from 60 to 20 people
new File("/humgen/gsa-scr1/delangel/NewUG/calls/AugustRelease.filtered_Q50_QD5.0_SB0.0.allSamples.SNPs_hg19.WEx_UG_newUG_MQC.vcf"), // ** THIS GOLD STANDARD NEEDS TO BE CORRECTED **
"/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list", 2.6, 99.0, !lowPass),
"/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list", 2.6, 99.0, !lowPass, exome, 96),
"LowPassN60" -> new Target("lowpass.N60", b36, dbSNP_b36, hapmap_b36, indelMask_b36,
new File("/humgen/1kg/analysis/bamsForDataProcessingPapers/lowpass_b36/lowpass.chr20.cleaned.matefixed.bam"), // the bam list to call from
new File("/home/radon01/depristo/work/oneOffProjects/VQSRCutByNRS/lowpass.N60.chr20.filtered.vcf"), // the gold standard VCF file to run through the VQSR
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.chr20.b36.intervals", 2.3, 99.0, lowPass), // chunked interval list to use with Queue's scatter/gather functionality
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.chr20.b36.intervals", 2.3, 99.0, lowPass, !exome, 60), // chunked interval list to use with Queue's scatter/gather functionality
"LowPassEUR363Nov" -> new Target("EUR.nov2010", b37, dbSNP_b37, hapmap_b37, indelMask_b37,
new File("/humgen/1kg/processing/pipeline_test_bams/EUR.363sample.Nov2010.chr20.bam"),
new File("/humgen/gsa-hpprojects/dev/data/AugChr20Calls_v4_3state/ALL.august.v4.chr20.filtered.vcf"), // ** THIS GOLD STANDARD NEEDS TO BE CORRECTED **
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.chr20.hg19.intervals", 2.3, 99.0, lowPass)
"/humgen/1kg/processing/pipeline_test_bams/whole_genome_chunked.chr20.hg19.intervals", 2.3, 99.0, lowPass, !exome, 363)
)
@ -170,9 +161,9 @@ class MethodsDevelopmentCallingPipeline extends QScript {
add(new snpCall(target))
add(new VQSR(target, !goldStandard))
add(new applyVQSR(target, !goldStandard))
if (eval) add(new snpEvaluation(target))
add(new snpEvaluation(target))
}
if ( !skipGoldStandard ) {
if ( runGoldStandard ) {
add(new VQSR(target, goldStandard))
add(new applyVQSR(target, goldStandard))
}
@ -187,22 +178,19 @@ class MethodsDevelopmentCallingPipeline extends QScript {
}
def bai(bam: File) = new File(bam + ".bai")
val FiltersToIgnore = List("DPFilter", "ABFilter", "ESPStandard", "QualByDepth", "StrandBias", "HomopolymerRun")
// 1.) Unified Genotyper Base
class GenotyperBase (t: Target) extends UnifiedGenotyper with UNIVERSAL_GATK_ARGS {
this.memoryLimit = 3
this.reference_sequence = t.reference
this.intervalsString ++= List(t.intervals)
this.scatterCount = 63 // the smallest interval list has 63 intervals, one for each Mb on chr20
this.scatterCount = 140
this.nt = 2
this.dcov = if ( t.isLowpass ) { 50 } else { 250 }
this.stand_call_conf = if ( t.isLowpass ) { 4.0 } else { 30.0 }
this.stand_emit_conf = if ( t.isLowpass ) { 4.0 } else { 30.0 }
this.input_file :+= t.bamList
if (t.dbsnpFile.endsWith(".rod"))
this.DBSNP = new File(t.dbsnpFile)
else if (t.dbsnpFile.endsWith(".vcf"))
this.rodBind :+= RodBind("dbsnp", "VCF", t.dbsnpFile)
this.D = new File(t.dbsnpFile)
}
// 1a.) Call SNPs with UG
@ -216,7 +204,6 @@ class MethodsDevelopmentCallingPipeline extends QScript {
this.baq = if (noBAQ) {org.broadinstitute.sting.utils.baq.BAQ.CalculationMode.OFF} else {org.broadinstitute.sting.utils.baq.BAQ.CalculationMode.CALCULATE_AS_NECESSARY}
this.analysisName = t.name + "_UGs"
this.jobName = queueLogDir + t.name + ".snpcall"
this.A ++= List("FisherStrand")
}
// 1b.) Call Indels with UG
@ -234,15 +221,14 @@ class MethodsDevelopmentCallingPipeline extends QScript {
this.reference_sequence = t.reference
this.intervalsString ++= List(t.intervals)
this.scatterCount = 10
this.filterName ++= List("HARD_TO_VALIDATE")
this.filterExpression ++= List("\"MQ0 >= 4 && (MQ0 / (1.0 * DP)) > 0.1\"")
this.variantVCF = t.rawIndelVCF
this.V = t.rawIndelVCF
this.out = t.filteredIndelVCF
this.filterName ++= List("LowQual", "StrandBias", "QualByDepth", "HomopolymerRun")
if (t.isLowpass)
this.filterExpression ++= List("\"QUAL<30.0\"", "\"SB>=-1.0\"", "\"QD<1.0\"", "\"HRun>=15\"")
else
this.filterExpression ++= List("\"QUAL<50.0\"", "\"SB>=-1.0\"", "\"QD<5.0\"", "\"HRun>=15\"")
this.filterName ++= List("IndelQD", "IndelReadPosRankSum", "IndelFS")
this.filterExpression ++= List("\"QD < 2.0\"", "\"ReadPosRankSum < -20.0\"", "\"FS > 200.0\"")
if (t.nSamples >= 10) {
this.filterName ++= List("IndelInbreedingCoeff")
this.filterExpression ++= List("\"InbreedingCoeff < -0.8\"")
}
this.analysisName = t.name + "_VF"
this.jobName = queueLogDir + t.name + ".indelfilter"
}
@ -250,70 +236,74 @@ class MethodsDevelopmentCallingPipeline extends QScript {
// 3.) Variant Quality Score Recalibration - Generate Recalibration table
class VQSR(t: Target, goldStandard: Boolean) extends VariantRecalibrator with UNIVERSAL_GATK_ARGS {
this.memoryLimit = 4
this.nt = 2
this.reference_sequence = t.reference
this.intervalsString ++= List(t.intervals)
this.rodBind :+= RodBind("input", "VCF", if ( goldStandard ) { t.goldStandard_VCF } else { t.rawVCF } )
this.rodBind :+= RodBind("hapmap", "VCF", t.hapmapFile, "known=false,training=true,truth=true,prior=15.0")
if( t.hapmapFile.contains("b37") )
this.rodBind :+= RodBind("omni", "VCF", omni_b37, "known=false,training=true,truth=true,prior=12.0")
else if( t.hapmapFile.contains("b36") )
this.rodBind :+= RodBind("omni", "VCF", omni_b36, "known=false,training=true,truth=true,prior=12.0")
if (t.dbsnpFile.endsWith(".rod"))
this.rodBind :+= RodBind("dbsnp", "DBSNP", t.dbsnpFile, "known=true,training=false,truth=false,prior=10.0")
else if (t.dbsnpFile.endsWith(".vcf"))
this.rodBind :+= RodBind("dbsnp", "VCF", t.dbsnpFile, "known=true,training=false,truth=false,prior=10.0")
this.use_annotation ++= List("QD", "HaplotypeScore", "MQRankSum", "ReadPosRankSum", "HRun", "FS")
this.input :+= ( if ( goldStandard ) { t.goldStandard_VCF } else { t.rawVCF } )
this.training :+= new TaggedFile( t.hapmapFile, "prior=15.0")
this.truth :+= new TaggedFile( t.hapmapFile, "prior=15.0")
this.training :+= new TaggedFile( omni_b37, "prior=12.0")
this.truth :+= new TaggedFile( omni_b37, "prior=12.0")
this.training :+= new TaggedFile( training_1000G, "prior=10.0" )
this.known :+= new TaggedFile( t.dbsnpFile, "prior=2.0" )
this.resource :+= new TaggedFile( projectConsensus_1000G, "prior=8.0" )
this.use_annotation ++= List("QD", "HaplotypeScore", "MQRankSum", "ReadPosRankSum", "MQ", "FS")
if(t.nSamples >= 10) {
this.use_annotation ++= List("InbreedingCoeff")
}
if(!t.isExome) {
this.use_annotation ++= List("DP")
} else {
this.mG = 6
}
this.tranches_file = if ( goldStandard ) { t.goldStandardTranchesFile } else { t.tranchesFile }
this.recal_file = if ( goldStandard ) { t.goldStandardRecalFile } else { t.recalFile }
this.allPoly = true
this.tranche ++= List("100.0", "99.9", "99.5", "99.3", "99.0", "98.9", "98.8", "98.5", "98.4", "98.3", "98.2", "98.1", "98.0", "97.9", "97.8", "97.5", "97.0", "95.0", "90.0")
this.rscript_file = t.vqsrRscript
this.analysisName = t.name + "_VQSR"
this.jobName = queueLogDir + t.name + ".VQSR"
this.jobName = queueLogDir + t.name + ".VQSR"
}
// 4.) Apply the recalibration table to the appropriate tranches
class applyVQSR (t: Target, goldStandard: Boolean) extends ApplyRecalibration with UNIVERSAL_GATK_ARGS {
this.memoryLimit = 4
this.memoryLimit = 6
this.reference_sequence = t.reference
this.intervalsString ++= List(t.intervals)
this.rodBind :+= RodBind("input", "VCF", if ( goldStandard ) { t.goldStandard_VCF } else { t.rawVCF } )
this.input :+= ( if ( goldStandard ) { t.goldStandard_VCF } else { t.rawVCF } )
this.tranches_file = if ( goldStandard ) { t.goldStandardTranchesFile } else { t.tranchesFile}
this.recal_file = if ( goldStandard ) { t.goldStandardRecalFile } else { t.recalFile }
this.ts_filter_level = t.trancheTarget
this.out = t.recalibratedVCF
this.analysisName = t.name + "_AVQSR"
this.jobName = queueLogDir + t.name + ".applyVQSR"
this.jobName = queueLogDir + t.name + ".applyVQSR"
}
// 5.) Variant Evaluation Base(OPTIONAL)
class EvalBase(t: Target) extends VariantEval with UNIVERSAL_GATK_ARGS {
this.memoryLimit = 3
this.reference_sequence = t.reference
this.rodBind :+= RodBind("comphapmap", "VCF", t.hapmapFile)
this.comp :+= new TaggedFile(t.hapmapFile, "hapmap" )
this.intervalsString ++= List(t.intervals)
if (t.dbsnpFile.endsWith(".rod"))
this.DBSNP = new File(t.dbsnpFile)
else if (t.dbsnpFile.endsWith(".vcf"))
this.rodBind :+= RodBind("dbsnp", "VCF", t.dbsnpFile)
this.D = new File(t.dbsnpFile)
this.sample = samples
}
// 5a.) SNP Evaluation (OPTIONAL) based on the cut vcf
class snpEvaluation(t: Target) extends EvalBase(t) {
if (t.reference == b37 || t.reference == hg19) this.rodBind :+= RodBind("compomni", "VCF", omni_b37)
this.rodBind :+= RodBind("eval", "VCF", t.recalibratedVCF )
if (t.reference == b37 || t.reference == hg19) this.comp :+= new TaggedFile( omni_b37, "omni" )
this.eval :+= t.recalibratedVCF
this.out = t.evalFile
this.analysisName = t.name + "_VEs"
this.jobName = queueLogDir + t.name + ".snp.eval"
this.jobName = queueLogDir + t.name + ".snp.eval"
}
// 5b.) Indel Evaluation (OPTIONAL)
class indelEvaluation(t: Target) extends EvalBase(t) {
this.rodBind :+= RodBind("eval", "VCF", t.filteredIndelVCF)
this.eval :+= t.filteredIndelVCF
this.evalModule :+= "IndelStatistics"
this.out = t.evalIndelFile
this.analysisName = t.name + "_VEi"
this.jobName = queueLogDir + queueLogDir + t.name + ".indel.eval"
this.jobName = queueLogDir + queueLogDir + t.name + ".indel.eval"
}
}

View File

@ -41,12 +41,27 @@ class QSettings {
@Argument(fullName="job_queue", shortName="jobQueue", doc="Default queue for compute farm jobs.", required=false)
var jobQueue: String = _
@Argument(fullName="job_priority", shortName="jobPriority", doc="Default priority for jobs.", required=false)
@Argument(fullName="job_priority", shortName="jobPriority", doc="Default priority for jobs. Min = 0, Max = 100", required=false)
var jobPriority: Option[Int] = None
@Argument(fullName="default_memory_limit", shortName="memLimit", doc="Default memory limit for jobs, in gigabytes.", required=false)
@Argument(fullName="job_native_arg", shortName="jobNative", doc="Native arguments to pass to the job runner.", required=false)
var jobNativeArgs: List[String] = Nil
@Argument(fullName="job_resource_request", shortName="jobResReq", doc="Resource requests to pass to the job runner.", required=false)
var jobResourceRequests: List[String] = Nil
@Argument(fullName="job_environment_name", shortName="jobEnv", doc="Environment names for the job runner.", required=false)
var jobEnvironmentNames: List[String] = Nil
@Argument(fullName="memory_limit", shortName="memLimit", doc="Default memory limit for jobs, in gigabytes.", required=false)
var memoryLimit: Option[Double] = None
@Argument(fullName="resident_memory_limit", shortName="resMemLimit", doc="Default resident memory limit for jobs, in gigabytes.", required=false)
var residentLimit: Option[Double] = None
@Argument(fullName="resident_memory_request", shortName="resMemReq", doc="Default resident memory request for jobs, in gigabytes.", required=false)
var residentRequest: Option[Double] = None
@Argument(fullName="run_directory", shortName="runDir", doc="Root directory to run functions from.", required=false)
var runDirectory = new File(".")

View File

@ -51,10 +51,21 @@ trait CommandLineJobRunner extends JobRunner[CommandLineFunction] with Logging {
/** The last time the status was updated */
protected var lastStatusUpdate: Long = _
final override def status = this.lastStatus
/** The runner specific priority for a minimum priority job */
protected val minRunnerPriority = 0
def residentRequestMB: Option[Double] = function.memoryLimit.map(_ * 1024)
def residentLimitMB: Option[Double] = residentRequestMB.map( _ * 1.2 )
/** The runner specific priority for a maximum priority job */
protected val maxRunnerPriority = 0
/** The priority of the function in the range defined by the runner */
protected def functionPriority = {
function.jobPriority.map { priority =>
(((priority / 100D) * (maxRunnerPriority - minRunnerPriority)) + minRunnerPriority).
round.intValue() min maxRunnerPriority max minRunnerPriority
}
}
final override def status = this.lastStatus
override def init() {
super.init()

View File

@ -30,6 +30,9 @@ import org.broadinstitute.sting.queue.function.QFunction
* Creates and stops JobRunners
*/
trait JobManager[TFunction <: QFunction, TRunner <: JobRunner[TFunction]] {
def init() {}
def exit() {}
/** The class type of the runner. Available at runtime even after erasure. */
def functionType: Class[TFunction]
@ -52,6 +55,5 @@ trait JobManager[TFunction <: QFunction, TRunner <: JobRunner[TFunction]] {
* Stops a list of functions.
* @param runners Runners to stop.
*/
def tryStop(runners: Set[TRunner]) {
}
def tryStop(runners: Set[TRunner]) {}
}

View File

@ -361,6 +361,13 @@ class QGraph extends Logging {
settings.jobRunner = "Shell"
commandLineManager = commandLinePluginManager.createByName(settings.jobRunner)
for (mgr <- managers) {
if (mgr != null) {
val manager = mgr.asInstanceOf[JobManager[QFunction,JobRunner[QFunction]]]
manager.init()
}
}
if (settings.startFromScratch)
logger.info("Removing outputs from previous runs.")
@ -1034,18 +1041,26 @@ class QGraph extends Logging {
for (mgr <- managers) {
if (mgr != null) {
val manager = mgr.asInstanceOf[JobManager[QFunction,JobRunner[QFunction]]]
val managerRunners = runners
.filter(runner => manager.runnerType.isAssignableFrom(runner.getClass))
.asInstanceOf[Set[JobRunner[QFunction]]]
if (managerRunners.size > 0)
try {
manager.tryStop(managerRunners)
} catch {
case e => /* ignore */
try {
val managerRunners = runners
.filter(runner => manager.runnerType.isAssignableFrom(runner.getClass))
.asInstanceOf[Set[JobRunner[QFunction]]]
if (managerRunners.size > 0)
try {
manager.tryStop(managerRunners)
} catch {
case e => /* ignore */
}
for (runner <- managerRunners) {
try {
runner.cleanup()
} catch {
case e => /* ignore */
}
}
for (runner <- managerRunners) {
} finally {
try {
runner.cleanup()
manager.exit()
} catch {
case e => /* ignore */
}

View File

@ -0,0 +1,61 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.queue.engine.drmaa
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.engine.CommandLineJobManager
import org.broadinstitute.sting.jna.drmaa.v1_0.JnaSessionFactory
import org.ggf.drmaa.Session
/**
* Runs jobs using DRMAA
*/
class DrmaaJobManager extends CommandLineJobManager[DrmaaJobRunner] {
protected var session: Session = _
protected def newSession() = new JnaSessionFactory().getSession
protected def contact = null
override def init() {
session = newSession()
session.init(contact)
}
override def exit() {
session.exit()
}
def runnerType = classOf[DrmaaJobRunner]
def create(function: CommandLineFunction) = new DrmaaJobRunner(session, function)
override def updateStatus(runners: Set[DrmaaJobRunner]) = {
var updatedRunners = Set.empty[DrmaaJobRunner]
runners.foreach(runner => if (runner.updateJobStatus()) {updatedRunners += runner})
updatedRunners
}
override def tryStop(runners: Set[DrmaaJobRunner]) {
runners.filterNot(_.jobId == null).foreach(_.tryStop())
}
}

View File

@ -0,0 +1,149 @@
/*
* Copyright (c) 2011, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.queue.engine.drmaa
import org.broadinstitute.sting.queue.QException
import org.broadinstitute.sting.queue.util.{Logging,Retry}
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.engine.{RunnerStatus, CommandLineJobRunner}
import java.util.Collections
import org.ggf.drmaa._
/**
* Runs jobs using DRMAA.
*/
class DrmaaJobRunner(val session: Session, val function: CommandLineFunction) extends CommandLineJobRunner with Logging {
/** Job Id of the currently executing job. */
var jobId: String = _
override def jobIdString = jobId
// Set the display name to < 512 characters of the description
// NOTE: Not sure if this is configuration specific?
protected val jobNameLength = 500
protected val jobNameFilter = """[^A-Za-z0-9_]"""
protected def functionNativeSpec = function.jobNativeArgs.mkString(" ")
def start() {
session.synchronized {
val drmaaJob: JobTemplate = session.createJobTemplate
drmaaJob.setJobName(function.description.take(jobNameLength).replaceAll(jobNameFilter, "_"))
// Set the current working directory
drmaaJob.setWorkingDirectory(function.commandDirectory.getPath)
// Set the output file for stdout
drmaaJob.setOutputPath(":" + function.jobOutputFile.getPath)
// If the error file is set specify the separate output for stderr
// Otherwise join with stdout
if (function.jobErrorFile != null) {
drmaaJob.setErrorPath(":" + function.jobErrorFile.getPath)
} else {
drmaaJob.setJoinFiles(true)
}
drmaaJob.setNativeSpecification(functionNativeSpec)
// Instead of running the function.commandLine, run "sh <jobScript>"
drmaaJob.setRemoteCommand("sh")
drmaaJob.setArgs(Collections.singletonList(jobScript.toString))
// Allow advanced users to update the request via QFunction.updateJobRun()
updateJobRun(drmaaJob)
updateStatus(RunnerStatus.RUNNING)
// Start the job and store the id so it can be killed in tryStop
try {
Retry.attempt(() => {
try {
jobId = session.runJob(drmaaJob)
} catch {
case de: DrmaaException => throw new QException("Unable to submit job: " + de.getLocalizedMessage)
}
}, 1, 5, 10)
} finally {
// Prevent memory leaks
session.deleteJobTemplate(drmaaJob)
}
logger.info("Submitted job id: " + jobId)
}
}
def updateJobStatus() = {
session.synchronized {
var returnStatus: RunnerStatus.Value = null
try {
val jobStatus = session.getJobProgramStatus(jobId);
jobStatus match {
case Session.QUEUED_ACTIVE => returnStatus = RunnerStatus.RUNNING
case Session.DONE =>
val jobInfo: JobInfo = session.wait(jobId, Session.TIMEOUT_NO_WAIT)
if ((jobInfo.hasExited && jobInfo.getExitStatus != 0)
|| jobInfo.hasSignaled
|| jobInfo.wasAborted)
returnStatus = RunnerStatus.FAILED
else
returnStatus = RunnerStatus.DONE
case Session.FAILED => returnStatus = RunnerStatus.FAILED
case Session.UNDETERMINED => logger.warn("Unable to determine status of job id " + jobId)
case _ => returnStatus = RunnerStatus.RUNNING
}
} catch {
// getJobProgramStatus will throw an exception once wait has run, as the
// job will be reaped. If the status is currently DONE or FAILED, return
// the status.
case de: DrmaaException =>
if (lastStatus == RunnerStatus.DONE || lastStatus == RunnerStatus.FAILED)
returnStatus = lastStatus
else
logger.warn("Unable to determine status of job id " + jobId, de)
}
if (returnStatus != null) {
updateStatus(returnStatus)
true
} else {
false
}
}
}
def tryStop() {
session.synchronized {
try {
// Stop runners. SIGTERM(15) is preferred to SIGKILL(9).
// Only way to send SIGTERM is for the Sys Admin set the terminate_method
// resource of the designated queue to SIGTERM
session.control(jobId, Session.TERMINATE)
} catch {
case e =>
logger.error("Unable to kill job " + jobId, e)
}
}
}
}

View File

@ -24,13 +24,9 @@
package org.broadinstitute.sting.queue.engine.gridengine
import org.broadinstitute.sting.queue.engine.CommandLineJobManager
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.engine.drmaa.DrmaaJobManager
class GridEngineJobManager extends CommandLineJobManager[GridEngineJobRunner] {
def runnerType = classOf[GridEngineJobRunner]
def create(function: CommandLineFunction) = new GridEngineJobRunner(function)
override def updateStatus(runners: Set[GridEngineJobRunner]) = { GridEngineJobRunner.updateStatus(runners) }
override def tryStop(runners: Set[GridEngineJobRunner]) { GridEngineJobRunner.tryStop(runners) }
class GridEngineJobManager extends DrmaaJobManager {
override def create(function: CommandLineFunction) = new GridEngineJobRunner(session, function)
}

View File

@ -24,203 +24,52 @@
package org.broadinstitute.sting.queue.engine.gridengine
import org.broadinstitute.sting.queue.QException
import org.broadinstitute.sting.queue.util.{Logging,Retry}
import org.broadinstitute.sting.queue.util.Logging
import org.broadinstitute.sting.queue.function.CommandLineFunction
import org.broadinstitute.sting.queue.engine.{RunnerStatus, CommandLineJobRunner}
import org.ggf.drmaa.{DrmaaException,JobInfo,JobTemplate,Session,SessionFactory}
import java.util.Collections
import org.broadinstitute.sting.queue.engine.drmaa.DrmaaJobRunner
import org.ggf.drmaa.Session
/**
* Runs jobs on a Grid Engine compute cluster.
*/
class GridEngineJobRunner(val function: CommandLineFunction) extends CommandLineJobRunner with Logging {
// Run the static initializer for GridEngineJobRunner
GridEngineJobRunner
/** Job Id of the currently executing job. */
private var jobId: String = _
override def jobIdString = jobId
def start() {
GridEngineJobRunner.gridEngineSession.synchronized {
val gridEngineJob: JobTemplate = GridEngineJobRunner.gridEngineSession.createJobTemplate
// Force the remote environment to inherit local environment settings
var nativeSpecString: String = "-V"
// Set the display name to < 512 characters of the description
// NOTE: Not sure if this is configuration specific?
gridEngineJob.setJobName(GridEngineJobRunner.toJobName(function.description.take(500)))
// Set the output file for stdout
gridEngineJob.setOutputPath(":" + function.jobOutputFile.getPath)
// Set the current working directory
gridEngineJob.setWorkingDirectory(function.commandDirectory.getPath)
// If the error file is set specify the separate output for stderr
// Otherwise join with stdout
if (Option(function.jobErrorFile) != None) {
gridEngineJob.setErrorPath(":" + function.jobErrorFile.getPath)
} else {
gridEngineJob.setJoinFiles(true)
}
// If a project name is set specify the project name
if (Option(function.jobProject) != None) {
nativeSpecString += " -P " + function.jobProject
}
// If the job queue is set specify the job queue
if (Option(function.jobQueue) != None) {
nativeSpecString += " -q " + function.jobQueue
}
// If the resident set size is requested pass on the memory request
if (residentRequestMB.isDefined) {
nativeSpecString += " -l mem_free=%dM".format(residentRequestMB.get.ceil.toInt)
}
// If the resident set size limit is defined specify the memory limit
if (residentLimitMB.isDefined) {
nativeSpecString += " -l h_rss=%dM".format(residentLimitMB.get.ceil.toInt)
}
// If the priority is set (user specified Int) specify the priority
if (function.jobPriority.isDefined) {
nativeSpecString += " -p " + function.jobPriority.get
}
gridEngineJob.setNativeSpecification(nativeSpecString)
// Instead of running the function.commandLine, run "sh <jobScript>"
gridEngineJob.setRemoteCommand("sh")
gridEngineJob.setArgs(Collections.singletonList(jobScript.toString))
// Allow advanced users to update the request via QFunction.updateJobRun()
updateJobRun(gridEngineJob)
updateStatus(RunnerStatus.RUNNING)
// Start the job and store the id so it can be killed in tryStop
try {
Retry.attempt(() => {
try {
jobId = GridEngineJobRunner.gridEngineSession.runJob(gridEngineJob)
} catch {
case de: DrmaaException => throw new QException("Unable to submit job: " + de.getLocalizedMessage)
}
}, 1, 5, 10)
} finally {
// Prevent memory leaks
GridEngineJobRunner.gridEngineSession.deleteJobTemplate(gridEngineJob)
}
logger.info("Submitted Grid Engine job id: " + jobId)
}
}
}
object GridEngineJobRunner extends Logging {
private val gridEngineSession = SessionFactory.getFactory.getSession
initGridEngine()
/**
* Initialize the Grid Engine library.
*/
private def initGridEngine() {
gridEngineSession.synchronized {
try {
gridEngineSession.init("")
} catch {
case de: DrmaaException =>
logger.error("Issue initializing Grid Engine", de)
throw new QException("init() failed", de)
}
}
}
/**
* Updates the status of a list of jobs.
* @param runners Runners to update.
* @return runners which were updated.
*/
def updateStatus(runners: Set[GridEngineJobRunner]) = {
var updatedRunners = Set.empty[GridEngineJobRunner]
gridEngineSession.synchronized {
runners.foreach(runner => if (updateRunnerStatus(runner)) {updatedRunners += runner})
}
updatedRunners
}
/**
* Tries to stop any running jobs.
* @param runners Runners to stop.
*/
def tryStop(runners: Set[GridEngineJobRunner]) {
// Stop runners. SIGTERM(15) is preferred to SIGKILL(9).
// Only way to send SIGTERM is for the Sys Admin set the terminate_method
// resource of the designated queue to SIGTERM
gridEngineSession.synchronized {
for (runner <- runners.filterNot(runner => Option(runner.jobId) == None)) {
try {
gridEngineSession.control(runner.jobId, Session.TERMINATE)
} catch {
case e =>
logger.error("Unable to kill job " + runner.jobId, e)
}
}
gridEngineSession.exit()
}
}
private def updateRunnerStatus(runner: GridEngineJobRunner): Boolean = {
var returnStatus: RunnerStatus.Value = null
try {
val jobStatus = gridEngineSession.getJobProgramStatus(runner.jobId);
jobStatus match {
case Session.QUEUED_ACTIVE => returnStatus = RunnerStatus.RUNNING
case Session.DONE =>
val jobInfo: JobInfo = gridEngineSession.wait(runner.jobId, Session.TIMEOUT_NO_WAIT)
if ((jobInfo.hasExited && jobInfo.getExitStatus > 0)
|| jobInfo.hasSignaled
|| jobInfo.wasAborted)
returnStatus = RunnerStatus.FAILED
else
returnStatus = RunnerStatus.DONE
case Session.FAILED => returnStatus = RunnerStatus.FAILED
case Session.UNDETERMINED => logger.warn("Unable to determine status of Grid Engine job id " + runner.jobId)
case _ => returnStatus = RunnerStatus.RUNNING
}
} catch {
// getJobProgramStatus will throw an exception once wait has run, as the
// job will be reaped. If the status is currently DONE or FAILED, return
// the status.
case de: DrmaaException =>
if (runner.lastStatus == RunnerStatus.DONE || runner.lastStatus == RunnerStatus.FAILED)
returnStatus = runner.lastStatus
else
logger.warn("Unable to determine status of Grid Engine job id " + runner.jobId, de)
}
if (returnStatus != null) {
runner.updateStatus(returnStatus)
true
} else {
false
}
}
// Reap what we've sown
override def finalize() {
gridEngineSession.exit()
}
class GridEngineJobRunner(session: Session, function: CommandLineFunction) extends DrmaaJobRunner(session, function) with Logging {
// Grid Engine disallows certain characters from being in job names.
// This replaces all illegal characters with underscores
private def toJobName(name: String): String = {
name.replaceAll("""[\n\t\r/:@\\*?]""", "_")
protected override val jobNameFilter = """[\n\t\r/:@\\*?]"""
protected override val minRunnerPriority = -1023
protected override val maxRunnerPriority = 0
override protected def functionNativeSpec = {
// Force the remote environment to inherit local environment settings
var nativeSpec: String = "-V"
// If a project name is set specify the project name
if (function.jobProject != null)
nativeSpec += " -P " + function.jobProject
// If the job queue is set specify the job queue
if (function.jobQueue != null)
nativeSpec += " -q " + function.jobQueue
// If the resident set size is requested pass on the memory request
if (function.residentRequest.isDefined)
nativeSpec += " -l mem_free=%dM".format(function.residentRequest.map(_ * 1024).get.ceil.toInt)
// If the resident set size limit is defined specify the memory limit
if (function.residentLimit.isDefined)
nativeSpec += " -l h_rss=%dM".format(function.residentLimit.map(_ * 1024).get.ceil.toInt)
// Pass on any job resource requests
nativeSpec += function.jobResourceRequests.map(" -l " + _).mkString
// Pass on any job environment names
nativeSpec += function.jobEnvironmentNames.map(" -pe " + _).mkString
// If the priority is set specify the priority
val priority = functionPriority
if (priority.isDefined)
nativeSpec += " -p " + priority.get
(nativeSpec + " " + super.functionNativeSpec).trim()
}
}

View File

@ -34,6 +34,8 @@ import org.broadinstitute.sting.jna.lsf.v7_0_6.LibBat.{submitReply, submit}
import com.sun.jna.ptr.IntByReference
import org.broadinstitute.sting.queue.engine.{RunnerStatus, CommandLineJobRunner}
import com.sun.jna.{Structure, StringArray, NativeLong}
import java.util.regex.Pattern
import java.lang.StringBuffer
/**
* Runs jobs on an LSF compute cluster.
@ -47,12 +49,22 @@ class Lsf706JobRunner(val function: CommandLineFunction) extends CommandLineJobR
private var jobId = -1L
override def jobIdString = jobId.toString
protected override val minRunnerPriority = 1
protected override val maxRunnerPriority = Lsf706JobRunner.maxUserPriority
private val selectString = new StringBuffer()
private val usageString = new StringBuffer()
private val requestString = new StringBuffer()
/**
* Dispatches the function on the LSF cluster.
* @param function Command to run.
*/
def start() {
Lsf706JobRunner.lsfLibLock.synchronized {
parseResourceRequest()
val request = new submit
for (i <- 0 until LibLsf.LSF_RLIM_NLIMITS)
request.rLimits(i) = LibLsf.DEFAULT_RLIMIT;
@ -81,28 +93,45 @@ class Lsf706JobRunner(val function: CommandLineFunction) extends CommandLineJobR
}
// If the resident set size is requested pass on the memory request
if (residentRequestMB.isDefined) {
val memInUnits = Lsf706JobRunner.convertUnits(residentRequestMB.get)
request.resReq = "select[mem>%1$d] rusage[mem=%1$d]".format(memInUnits)
if (function.residentRequest.isDefined) {
val memInUnits = Lsf706JobRunner.convertUnits(function.residentRequest.get)
appendRequest("select", selectString, "&&", "mem>%d".format(memInUnits))
appendRequest("rusage", usageString, ",", "mem=%d".format(memInUnits))
}
val resReq = getResourceRequest
if (resReq.length > 0) {
request.resReq = resReq
request.options |= LibBat.SUB_RES_REQ
}
// If the resident set size limit is defined specify the memory limit
if (residentLimitMB.isDefined) {
val memInUnits = Lsf706JobRunner.convertUnits(residentLimitMB.get)
if (function.residentLimit.isDefined) {
val memInUnits = Lsf706JobRunner.convertUnits(function.residentLimit.get)
request.rLimits(LibLsf.LSF_RLIMIT_RSS) = memInUnits
}
// If the priority is set (user specified Int) specify the priority
if (function.jobPriority.isDefined) {
request.userPriority = function.jobPriority.get
val priority = functionPriority
if (priority.isDefined) {
request.userPriority = priority.get
request.options2 |= LibBat.SUB2_JOB_PRIORITY
}
// Broad specific requirement, our esub requires there be a project
// else it will spit out a warning to stdout. see $LSF_SERVERDIR/esub
request.projectName = if (function.jobProject != null) function.jobProject else "Queue"
request.options |= LibBat.SUB_PROJECT_NAME
// Set the project to either the function or LSF default
val project = if (function.jobProject != null) function.jobProject else Lsf706JobRunner.defaultProject
if (project != null) {
request.projectName = project
request.options |= LibBat.SUB_PROJECT_NAME
}
// Set the esub names based on the job envorinment names
if (!function.jobEnvironmentNames.isEmpty) {
val argv = Array("", "-a", function.jobEnvironmentNames.mkString(" "))
val setOptionResult = LibBat.setOption_(argv.length, new StringArray(argv), "a:", request, ~0, ~0, ~0, null);
if (setOptionResult == -1)
throw new QException("setOption_() returned -1 while setting esub");
}
// LSF specific: get the max runtime for the jobQueue and pass it for this job
request.rLimits(LibLsf.LSF_RLIMIT_RUN) = Lsf706JobRunner.getRlimitRun(function.jobQueue)
@ -132,6 +161,41 @@ class Lsf706JobRunner(val function: CommandLineFunction) extends CommandLineJobR
logger.debug("Job Id %s status / exitStatus / exitInfo: ??? / ??? / ???".format(jobId))
super.checkUnknownStatus()
}
private def parseResourceRequest() {
requestString.setLength(0)
selectString.setLength(0)
usageString.setLength(0)
requestString.append(function.jobResourceRequests.mkString(" "))
extractSection(requestString, "select", selectString)
extractSection(requestString, "rusage", usageString)
}
private def extractSection(requestString: StringBuffer, section: String, sectionString: StringBuffer) {
val pattern = Pattern.compile(section + "\\s*\\[[^\\]]+\\]\\s*");
val matcher = pattern.matcher(requestString.toString)
if (matcher.find()) {
sectionString.setLength(0)
sectionString.append(matcher.group().trim())
val sb = new StringBuffer
matcher.appendReplacement(sb, "")
matcher.appendTail(sb)
requestString.setLength(0)
requestString.append(sb)
}
}
private def appendRequest(section: String, sectionString: StringBuffer, separator: String, request: String) {
if (sectionString.length() == 0)
sectionString.append(section).append("[").append(request).append("]")
else
sectionString.insert(sectionString.length() - 1, separator + request)
}
private def getResourceRequest = "%s %s %s".format(selectString, usageString, requestString).trim()
}
object Lsf706JobRunner extends Logging {
@ -141,15 +205,23 @@ object Lsf706JobRunner extends Logging {
/** Number of seconds for a non-normal exit status before we give up on expecting LSF to retry the function. */
private val retryExpiredSeconds = 5 * 60
initLsf()
/**
* Initialize the Lsf library.
*/
private def initLsf() {
private val (defaultQueue, defaultProject, maxUserPriority) = {
lsfLibLock.synchronized {
if (LibBat.lsb_init("Queue") < 0)
throw new QException(LibBat.lsb_sperror("lsb_init() failed"))
val parameterInfo = LibBat.lsb_parameterinfo(null, null, 0);
var defaultQueue: String = parameterInfo.defaultQueues
val defaultProject = parameterInfo.defaultProject
val maxUserPriority = parameterInfo.maxUserPriority
if (defaultQueue != null && defaultQueue.indexOf(' ') > 0)
defaultQueue = defaultQueue.split(" ")(0)
(defaultQueue, defaultProject, maxUserPriority)
}
}
@ -249,17 +321,6 @@ object Lsf706JobRunner extends Logging {
}
}
/** The name of the default queue. */
private lazy val defaultQueue: String = {
lsfLibLock.synchronized {
val numQueues = new IntByReference(1)
val queueInfo = LibBat.lsb_queueinfo(null, numQueues, null, null, 0)
if (queueInfo == null)
throw new QException(LibBat.lsb_sperror("Unable to get LSF queue info for the default queue"))
queueInfo.queue
}
}
/** The run limits for each queue. */
private var queueRlimitRun = Map.empty[String,Int]
@ -299,15 +360,15 @@ object Lsf706JobRunner extends Logging {
Structure.autoRead(unitsParam.asInstanceOf[Array[Structure]])
unitsParam(0).paramValue match {
case "MB" => 1D
case "GB" => 1024D
case "TB" => 1024D * 1024
case "PB" => 1024D * 1024 * 1024
case "EB" => 1024D * 1024 * 1024 * 1024
case null => 1D
case "MB" => 1 / 1024D
case "GB" => 1D
case "TB" => 1024D
case "PB" => 1024D * 1024
case "EB" => 1024D * 1024 * 1024
case null => 1 / 1024D
}
}
}
private def convertUnits(mb: Double) = (mb / unitDivisor).ceil.toInt
private def convertUnits(gb: Double) = (gb / unitDivisor).ceil.toInt
}

View File

@ -15,13 +15,13 @@ class AddOrReplaceReadGroups extends org.broadinstitute.sting.queue.function.Jav
javaMainClass = "net.sf.picard.sam.AddOrReplaceReadGroups"
@Input(doc="The input SAM or BAM files to analyze. Must be coordinate sorted.", shortName = "input", fullName = "input_bam_files", required = true)
var input: List[File] = _
var input: List[File] = Nil
@Output(doc="The output BAM file with the modified/added read groups", shortName = "output", fullName = "output_bam_file", required = true)
var output: File = _
@Output(doc="The output bam index", shortName = "out_index", fullName = "output_bam_index_file", required = false)
var outputIndex: File = new File(output + ".bai")
var outputIndex: File = _
@Argument(doc="Read group ID", shortName = "id", fullName = "read_group_id", required = true)
var RGID: String = _
@ -44,6 +44,12 @@ class AddOrReplaceReadGroups extends org.broadinstitute.sting.queue.function.Jav
@Argument(doc = "Read group description", shortName = "ds", fullName = "read_group_description", required = false)
var RGDS: String = ""
override def freezeFieldValues() {
super.freezeFieldValues()
if (outputIndex == null && output != null)
outputIndex = new File(output.getName.stripSuffix(".bam") + ".bai")
}
override def inputBams = input
override def outputBam = output

View File

@ -15,13 +15,13 @@ class MarkDuplicates extends org.broadinstitute.sting.queue.function.JavaCommand
javaMainClass = "net.sf.picard.sam.MarkDuplicates"
@Input(doc="The input SAM or BAM files to analyze. Must be coordinate sorted.", shortName = "input", fullName = "input_bam_files", required = true)
var input: List[File] = _
var input: List[File] = Nil
@Output(doc="The output file to write marked records to", shortName = "output", fullName = "output_bam_file", required = true)
var output: File = _
@Output(doc="The output bam index", shortName = "out_index", fullName = "output_bam_index_file", required = false)
var outputIndex: File = new File(output + ".bai")
var outputIndex: File = _
@Output(doc="File to write duplication metrics to", shortName = "out_metrics", fullName = "output_metrics_file", required = false)
var metrics: File = new File(output + ".metrics")
@ -35,6 +35,13 @@ class MarkDuplicates extends org.broadinstitute.sting.queue.function.JavaCommand
@Argument(doc = "This number, plus the maximum RAM available to the JVM, determine the memory footprint used by some of the sorting collections. If you are running out of memory, try reducing this number.", shortName = "sorting_ratio", fullName = "sorting_collection_size_ratio", required = false)
var SORTING_COLLECTION_SIZE_RATIO: Double = -1
override def freezeFieldValues() {
super.freezeFieldValues()
if (outputIndex == null && output != null)
outputIndex = new File(output.getName.stripSuffix(".bam") + ".bai")
}
override def inputBams = input
override def outputBam = output
this.sortOrder = null

View File

@ -3,6 +3,7 @@ package org.broadinstitute.sting.queue.extensions.picard
import org.broadinstitute.sting.commandline._
import java.io.File
import org.broadinstitute.sting.queue.QScript._
/*
* Created by IntelliJ IDEA.
@ -15,13 +16,13 @@ class MergeSamFiles extends org.broadinstitute.sting.queue.function.JavaCommandL
javaMainClass = "net.sf.picard.sam.MergeSamFiles"
@Input(doc="The input SAM or BAM files to analyze. Must be coordinate sorted.", shortName = "input", fullName = "input_bam_files", required = true)
var input: List[File] = _
var input: List[File] = Nil
@Output(doc="The output merged BAM file", shortName = "output", fullName = "output_bam_file", required = true)
var output: File = _
@Output(doc="The output bam index", shortName = "out_index", fullName = "output_bam_index_file", required = false)
var outputIndex: File = new File(output + ".bai")
var outputIndex: File = _
@Argument(doc="Merge the seqeunce dictionaries Default value: false. This option can be set to 'null' to clear the default value.", shortName = "merge_dict", fullName = "merge_sequence_dictionaries", required = false)
var MERGE_SEQUENCE_DICTIONARIES: Boolean = false
@ -32,6 +33,13 @@ class MergeSamFiles extends org.broadinstitute.sting.queue.function.JavaCommandL
@Argument(doc = "Comments to include in the merged output file's header.", shortName = "com", fullName = "comments", required = false)
var COMMENT: String = ""
override def freezeFieldValues() {
super.freezeFieldValues()
if (outputIndex == null && output != null)
outputIndex = new File(output.getName.stripSuffix(".bam") + ".bai")
}
override def inputBams = input
override def outputBam = output
this.createIndex = Some(true)

View File

@ -0,0 +1,61 @@
package org.broadinstitute.sting.queue.extensions.picard
import org.broadinstitute.sting.commandline._
import java.io.File
/*
* Created by IntelliJ IDEA.
* User: carneiro
* Date: 6/22/11
* Time: 10:35 AM
*/
class RevertSam extends org.broadinstitute.sting.queue.function.JavaCommandLineFunction with PicardBamFunction {
analysisName = "RevertSam"
javaMainClass = "net.sf.picard.sam.RevertSam"
@Input(shortName = "input", fullName = "input_bam_files", required = true, doc = "The input SAM or BAM files to revert.")
var input: List[File] = Nil
@Output(shortName = "output", fullName = "output_bam_file", required = true, doc = "The reverted BAM or SAM output file.")
var output: File = _
@Output(shortName = "out_index", fullName = "output_bam_index_file", required = false, doc = "The output bam index")
var outputIndex: File = _
@Argument(shortName = "roq", fullName = "restore_original_qualities", required = false, doc = "True to restore original qualities from the OQ field to the QUAL field if available.")
var restoreOriginalQualities: Boolean = true
@Argument(shortName = "rdi", fullName = "remove_duplicate_information", required = false, doc = "Remove duplicate read flags from all reads. Note that if this is true and REMOVE_ALIGNMENT_INFORMATION==false, the output may have the unusual but sometimes desirable trait of having unmapped reads that are marked as duplicates.")
var removeDuplicateInformation: Boolean = true
@Argument(shortName = "rai", fullName = "remove_alignment_information", required = false, doc = "Remove all alignment information from the file.")
var removeAlignmentInformation: Boolean = true
@Argument(shortName = "atc", fullName = "attributes_to_clear", required = false, doc = "When removing alignment information, the set of optional tags to remove.")
var attributesToClear: List[String] = Nil
@Argument(shortName = "sa", fullName = "sample_alias", required = false, doc = "The sample alias to use in the reverted output file. This will override the existing sample alias in the file and is used only if all the read groups in the input file have the same sample alias.")
var sampleAlias: String = null
@Argument(shortName = "ln", fullName = "library_name", required = false, doc = "The library name to use in the reverted output file. This will override the existing sample alias in the file and is used only if all the read groups in the input file have the same sample alias.")
var libraryName: String = null
override def freezeFieldValues() {
super.freezeFieldValues()
if (outputIndex == null && output != null)
outputIndex = new File(output.getName.stripSuffix(".bam") + ".bai")
}
override def inputBams = input
override def outputBam = output
this.createIndex = Some(true)
override def commandLine = super.commandLine +
conditionalParameter(!restoreOriginalQualities, " RESTORE_ORIGINAL_QUALITIES=false") +
conditionalParameter(!removeDuplicateInformation, " REMOVE_DUPLICATE_INFORMATION=false") +
conditionalParameter(!removeAlignmentInformation, " REMOVE_ALIGNMENT_INFORMATION=false") +
conditionalParameter(!attributesToClear.isEmpty, repeat(" ATTRIBUTE_TO_CLEAR=", attributesToClear)) +
conditionalParameter(sampleAlias != null, " SAMPLE_ALIAS=" + sampleAlias) +
conditionalParameter(libraryName != null, " LIBRARY_NAME=" + libraryName)
}

View File

@ -3,6 +3,7 @@ package org.broadinstitute.sting.queue.extensions.picard
import org.broadinstitute.sting.commandline._
import java.io.File
import org.broadinstitute.sting.queue.QScript._
/*
* Created by IntelliJ IDEA.
@ -15,13 +16,21 @@ class SortSam extends org.broadinstitute.sting.queue.function.JavaCommandLineFun
javaMainClass = "net.sf.picard.sam.SortSam"
@Input(doc="The input SAM or BAM files to sort.", shortName = "input", fullName = "input_bam_files", required = true)
var input: List[File] = _
var input: List[File] = Nil
@Output(doc="The sorted BAM or SAM output file.", shortName = "output", fullName = "output_bam_file", required = true)
var output: File = _
@Output(doc="The output bam index", shortName = "out_index", fullName = "output_bam_index_file", required = false)
var outputIndex: File = new File(output + ".bai")
var outputIndex: File = _
override def freezeFieldValues() {
super.freezeFieldValues()
if (outputIndex == null && output != null)
outputIndex = new File(output.getName.stripSuffix(".bam") + ".bai")
}
override def inputBams = input
override def outputBam = output

View File

@ -17,7 +17,7 @@ class ValidateSamFile extends org.broadinstitute.sting.queue.function.JavaComman
javaMainClass = "net.sf.picard.sam.ValidateSamFile"
@Input(doc="The input SAM or BAM files to analyze. Must be coordinate sorted.", shortName = "input", fullName = "input_bam_files", required = true)
var input: List[File] = _
var input: List[File] = Nil
@Output(doc="Send output to a file instead of stdout", shortName = "output", fullName = "output_file", required = false)
var output: File = _
@ -26,7 +26,7 @@ class ValidateSamFile extends org.broadinstitute.sting.queue.function.JavaComman
var MODE: Mode = Mode.VERBOSE
@Argument(doc="List of validation error types to ignore.", shortName = "ignore", fullName = "ignore_error_types", required = false)
var IGNORE: List[String] = _
var IGNORE: List[String] = Nil
@Argument(doc = "The maximum number of lines output in verbose mode.", shortName = "max", fullName = "max_output", required = false)
var MAX_OUTPUT: Int = 100

View File

@ -11,12 +11,27 @@ trait CommandLineFunction extends QFunction with Logging {
/** Upper memory limit */
var memoryLimit: Option[Double] = None
/** Resident memory limit */
var residentLimit: Option[Double] = None
/** Resident memory request */
var residentRequest: Option[Double] = None
/** Job project to run the command */
var jobProject: String = _
/** Job queue to run the command */
var jobQueue: String = _
/** Native arguments to pass to the job runner */
var jobNativeArgs: List[String] = Nil
/** Native arguments to pass to the job runner */
var jobResourceRequests: List[String] = Nil
/** Environment names to pass to the job runner */
var jobEnvironmentNames: List[String] = Nil
override def copySettingsTo(function: QFunction) {
super.copySettingsTo(function)
function match {
@ -24,13 +39,27 @@ trait CommandLineFunction extends QFunction with Logging {
if (commandLineFunction.memoryLimit.isEmpty)
commandLineFunction.memoryLimit = this.memoryLimit
if (commandLineFunction.residentLimit.isEmpty)
commandLineFunction.residentLimit = this.residentLimit
if (commandLineFunction.residentRequest.isEmpty)
commandLineFunction.residentRequest = this.residentRequest
if (commandLineFunction.jobProject == null)
commandLineFunction.jobProject = this.jobProject
if (commandLineFunction.jobQueue == null)
commandLineFunction.jobQueue = this.jobQueue
commandLineFunction.jobQueue = this.jobQueue
if (commandLineFunction.jobNativeArgs.isEmpty)
commandLineFunction.jobNativeArgs = this.jobNativeArgs
if (commandLineFunction.jobResourceRequests.isEmpty)
commandLineFunction.jobResourceRequests = this.jobResourceRequests
if (commandLineFunction.jobEnvironmentNames.isEmpty)
commandLineFunction.jobEnvironmentNames = this.jobEnvironmentNames
case _ => /* ignore */
}
}
@ -53,9 +82,30 @@ trait CommandLineFunction extends QFunction with Logging {
if (jobProject == null)
jobProject = qSettings.jobProject
if (jobNativeArgs.isEmpty)
jobNativeArgs = qSettings.jobNativeArgs
if (jobResourceRequests.isEmpty)
jobResourceRequests = qSettings.jobResourceRequests
if (jobEnvironmentNames.isEmpty)
jobEnvironmentNames = qSettings.jobEnvironmentNames
if (memoryLimit.isEmpty)
memoryLimit = qSettings.memoryLimit
if (residentLimit.isEmpty)
residentLimit = qSettings.residentLimit
if (residentRequest.isEmpty)
residentRequest = qSettings.residentRequest
if (residentRequest.isEmpty)
residentRequest = memoryLimit
if (residentLimit.isEmpty)
residentLimit = residentRequest.map( _ * 1.2 )
super.freezeFieldValues()
}

View File

@ -43,13 +43,15 @@ object PipelineTest extends BaseTest with Logging {
private val validationReportsDataLocation = "/humgen/gsa-hpprojects/GATK/validationreports/submitted/"
val run = System.getProperty("pipeline.run") == "run"
final val run = System.getProperty("pipeline.run") == "run"
private val jobRunners = {
final val allJobRunners = {
val commandLinePluginManager = new CommandLinePluginManager
commandLinePluginManager.getPlugins.map(commandLinePluginManager.getName(_)).filterNot(_ == "Shell")
commandLinePluginManager.getPlugins.map(commandLinePluginManager.getName(_)).toList
}
final val defaultJobRunners = List("Lsf706", "GridEngine")
/**
* Returns the top level output path to this test.
* @param testName The name of the test passed to PipelineTest.executeTest()
@ -79,9 +81,12 @@ object PipelineTest extends BaseTest with Logging {
* @param pipelineTest test to run.
*/
def executeTest(pipelineTest: PipelineTestSpec) {
var jobRunners = pipelineTest.jobRunners
if (jobRunners == null)
jobRunners = defaultJobRunners;
jobRunners.foreach(executeTest(pipelineTest, _))
}
/**
* Runs the pipelineTest.
* @param pipelineTest test to run.

View File

@ -1,7 +1,5 @@
package org.broadinstitute.sting.queue.pipeline
import java.io.File
class PipelineTestSpec(var name: String = null) {
/** The arguments to pass to the Queue test, ex: "-S scala/qscript/examples/HelloWorld.scala" */
@ -10,6 +8,9 @@ class PipelineTestSpec(var name: String = null) {
/** Job Queue to run the test. Default is null which means use hour. */
var jobQueue: String = _
/** Job runners to run the test. Default is null which means use the default. */
var jobRunners: List[String] = _
/** Expected MD5 results for each file path. */
var fileMD5s = Map.empty[String, String]

View File

@ -33,6 +33,7 @@ class HelloWorldPipelineTest {
val spec = new PipelineTestSpec
spec.name = "HelloWorld"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala"
spec.jobRunners = PipelineTest.allJobRunners
PipelineTest.executeTest(spec)
}
@ -40,23 +41,89 @@ class HelloWorldPipelineTest {
def testHelloWorldWithPrefix() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithPrefix"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala -jobPrefix HelloWorld"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -jobPrefix HelloWorld"
spec.jobRunners = PipelineTest.allJobRunners
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithMemoryLimit() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithPrefix"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala -memLimit 1.25"
spec.name = "HelloWorldMemoryLimit"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -memLimit 1.25"
spec.jobRunners = PipelineTest.allJobRunners
PipelineTest.executeTest(spec)
}
@Test(enabled=false)
@Test
def testHelloWorldWithPriority() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithPriority"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala -jobPriority 100"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -jobPriority 100"
spec.jobRunners = PipelineTest.allJobRunners
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithLsfResource() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithLsfResource"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -jobResReq rusage[iodine_io=1] -jobResReq select[swp>0] -jobResReq order[swp]"
spec.jobRunners = List("Lsf706")
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithLsfResourceAndMemoryLimit() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithLsfResourceAndMemoryLimit"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -memLimit 1.25 -jobResReq rusage[iodine_io=1] -jobResReq select[swp>0] -jobResReq order[swp]"
spec.jobRunners = List("Lsf706")
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithLsfEnvironment() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithLsfEnvironment"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -jobEnv tv"
spec.jobRunners = List("Lsf706")
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithGridEngineResource() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithGridEngineResource"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -jobResReq s_core=1000M"
spec.jobRunners = List("GridEngine")
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithGridEngineResourceAndMemoryLimit() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithGridEngineResourceAndMemoryLimit"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -memLimit 1.25 -jobResReq s_core=1000M"
spec.jobRunners = List("GridEngine")
PipelineTest.executeTest(spec)
}
@Test
def testHelloWorldWithGridEngineEnvironment() {
val spec = new PipelineTestSpec
spec.name = "HelloWorldWithGridEngineEnvironment"
spec.args = "-S public/scala/qscript/org/broadinstitute/sting/queue/qscripts/examples/HelloWorld.scala" +
" -jobEnv \"make 1\""
spec.jobRunners = List("GridEngine")
PipelineTest.executeTest(spec)
}
}