Removed the old (and unused) reporting system, now that Kiran's VE reporting system is working. Refactored dictionary-creation error messages into UserExceptions.

git-svn-id: file:///humgen/gsa-scr1/gsa-engineering/svn_contents/trunk@5836 348d0f76-0448-11de-a6fe-93d51630548a
This commit is contained in:
depristo 2011-05-22 18:42:52 +00:00
parent 4e7ecbdcb2
commit f608ed6d5a
34 changed files with 35 additions and 2475 deletions

View File

@ -75,9 +75,7 @@ public class ReferenceDataSource implements ReferenceDataSourceProgressListener
try {
// get exclusive lock
if (!indexLock.exclusiveLock())
throw new ReviewedStingException("Index file could not be written because a lock could not be obtained." +
"If you are running multiple instances of GATK, another process is probably creating this " +
"file now. Please wait until it is finished and try again.");
throw new UserException.CouldNotCreateReferenceIndexFileBecauseOfLock(dictFile);
FastaSequenceIndexBuilder faiBuilder = new FastaSequenceIndexBuilder(fastaFile, this);
FastaSequenceIndex sequenceIndex = faiBuilder.createIndex();
FastaSequenceIndexBuilder.saveAsFaiFile(sequenceIndex, indexFile);
@ -93,7 +91,7 @@ public class ReferenceDataSource implements ReferenceDataSourceProgressListener
catch (Exception e) {
// If lock creation succeeded, the failure must have been generating the index.
// If lock creation failed, just skip over index creation entirely.
throw new ReviewedStingException("Index file does not exist and could not be created because " + e.getMessage(), e);
throw new UserException.CouldNotCreateReferenceIndexFile(indexFile, e);
}
finally {
indexLock.unlock();
@ -119,9 +117,7 @@ public class ReferenceDataSource implements ReferenceDataSourceProgressListener
try {
// get shared lock on dict file so nobody else can start creating it
if (!dictLock.exclusiveLock())
throw new ReviewedStingException("Dictionary file could not be written because a lock could not be obtained." +
"If you are running multiple instances of GATK, another process is probably creating this " +
"file now. Please wait until it is finished and try again.");
throw new UserException.CouldNotCreateReferenceIndexFileBecauseOfLock(dictFile);
// dict will be written to random temporary file in same directory (see note above)
File tempFile = File.createTempFile("dict", null, dictFile.getParentFile());
tempFile.deleteOnExit();
@ -132,7 +128,7 @@ public class ReferenceDataSource implements ReferenceDataSourceProgressListener
new CreateSequenceDictionary().instanceMain(args);
if (!tempFile.renameTo(dictFile))
throw new ReviewedStingException("Error transferring temp file " + tempFile + " to dict file " + dictFile);
throw new UserException("Error transferring temp file " + tempFile + " to dict file " + dictFile);
}
catch(FileSystemInabilityToLockException ex) {
logger.info("Unable to create write lock: " + ex.getMessage());
@ -141,7 +137,7 @@ public class ReferenceDataSource implements ReferenceDataSourceProgressListener
catch (Exception e) {
// If lock creation succeeded, the failure must have been generating the index.
// If lock creation failed, just skip over index creation entirely.
throw new ReviewedStingException("Dictionary file does not exist and could not be created because " + e.getMessage(), e);
throw new UserException.CouldNotCreateReferenceIndexFile(dictFile, e);
}
finally {
dictLock.unlock();

View File

@ -28,7 +28,7 @@ import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.exceptions.StingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import org.broadinstitute.sting.utils.vcf.VCFUtils;
import java.io.File;

View File

@ -13,7 +13,7 @@ import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.StingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import java.util.*;

View File

@ -12,9 +12,9 @@ import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.NewEvaluationContext;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.MathUtils;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.util.HashMap;
import java.util.HashSet;

View File

@ -7,7 +7,7 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
/**
* IF THERE IS NO JAVADOC RIGHT HERE, YELL AT chartl

View File

@ -7,8 +7,8 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.util.ArrayList;

View File

@ -8,11 +8,10 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import org.broadinstitute.sting.utils.IndelUtils;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
/*

View File

@ -12,7 +12,7 @@ import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.StateKey;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import java.util.ArrayList;

View File

@ -33,8 +33,8 @@ import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.util.ArrayList;
import java.util.HashMap;

View File

@ -1,4 +1,4 @@
package org.broadinstitute.sting.utils.report.utils;
package org.broadinstitute.sting.gatk.walkers.varianteval.util;
/**

View File

@ -18,7 +18,6 @@ import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.utils.classloader.PluginManager;
import org.broadinstitute.sting.utils.exceptions.StingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.lang.reflect.Field;
import java.util.*;

View File

@ -1,273 +0,0 @@
package org.broadinstitute.sting.oneoffprojects.walkers.varianteval;
import org.broad.tribble.util.variantcontext.Genotype;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.evaluators.VariantEvaluator;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.report.tags.Analysis;
import org.broadinstitute.sting.utils.report.tags.DataPoint;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;
/**
* Created by IntelliJ IDEA.
* User: chartl
* Date: Nov 22, 2010
* Time: 12:22:08 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * VariantEval evaluator that tabulates alternate-allele-count (AC) "transitions": for random
 * orderings of the eval sample set it records, sample position by sample position, whether each
 * newly added sample's genotype leaves the running AC unchanged (hom-ref), raises it by one
 * (het), or raises it by two (hom-var). Results are averaged over NUM_PERMUTATIONS orderings.
 */
@Analysis(name = "ACTransitionMatrix", description = "Number of additional genotypes from each new sample; random permutations")
public class ACTransitionTable extends VariantEvaluator {
    // Number of random sample orderings averaged over.
    private final int NUM_PERMUTATIONS = 50;
    // A site is used only if at least this fraction of its genotypes are "good" (see isGood).
    private final double LOW_GQ_PCT = 0.95;
    // Minimum phred-scaled genotype quality for a genotype to count as "good".
    private final double LOW_GQ_THRSH = 30.0;
    // Set by the first update2() call; the permutations depend on the eval sample set.
    private boolean initialized = false;
    // Number of sites rejected by isGood(); only surfaced via the commented-out log line below.
    private long skipped = 0l;

    @DataPoint(name="Het transitions",description="AC[s] = AC[s-1]+1 and AC[s] = AC[s-1]+2 transitions")
    TransitionTable transitions = null;
    @DataPoint(name="Private permutations",description="Marginal increase in number of sites per sample")
    PermutationCounts privatePermutations;
    @DataPoint(name="AC2 Permutations",description="Marginal increase in number of AC=2 sites, per sample")
    PermutationCounts doubletonPermutations;
    @DataPoint(name="AC3 Permutations",description="Marginal increase in number of tripleton sites, per sample")
    PermutationCounts tripletonPermutations;

    // permutations[p] is the p'th random ordering of the sample names; permutations[0] is the
    // iteration order of the sample set itself, the others are shuffles of it.
    String[][] permutations;

    // Always-on evaluator.
    public boolean enabled() {
        return true;
    }

    // Requires (eval, comp) pairs.
    public int getComparisonOrder() {
        return 2;
    }

    public String getName() {
        return "ACTransitionTable";
    }

    /**
     * Processes one (eval, comp) pair. Sites present (unfiltered) in the comp track are skipped,
     * so only novel eval sites are tabulated. Lazily initializes the permutations and count
     * arrays from the first eval context seen. Always returns null (no interesting sites).
     */
    public String update2(VariantContext eval, VariantContext comp, RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
        if ( eval != null && ! initialized ) {
            //this.veWalker.getLogger().warn("Initializing...");
            initialize(eval);
            initialized = true;
        }
        if ( isGood(eval) ) {
            // skip sites that are known in (unfiltered) comp
            if ( comp != null && ! comp.isFiltered() ) {
                return null;
            }
            int order_offset = 0;
            for ( String[] ordering : permutations ) {
                int sample_offset = 0;
                int variant_ac = 0;
                for ( String sample : ordering ) {
                    if ( eval.getGenotype(sample).isHet() ) {
                        variant_ac++;
                        // index: [permutation][AC reached after this sample - 1][sample position]
                        transitions.hetTransitionCounts[order_offset][variant_ac-1][sample_offset]++;
                    } else if ( eval.getGenotype(sample).isHomVar() ) {
                        variant_ac += 2;
                        transitions.homTransitionCounts[order_offset][variant_ac-1][sample_offset]++;
                    } else {
                        // todo -- note, unclear how to treat no calls. Is the hom in het,ref,ref,nocall,hom sample 4 or 5?
                        // todo -- do we want to tabulate P[sample i is not variant | some variant]? This is just combinatorics so i left it out
                        // AC unchanged by this sample; only counted once some variant has been seen.
                        if ( variant_ac > 0 ) {
                            transitions.stationaryCounts[order_offset][variant_ac-1][sample_offset]++;
                        }
                    }
                    sample_offset ++;
                }
                order_offset++;
            }
        } else {
            skipped++;
        }
        return null;
    }

    /**
     * A site is usable when it is non-null, unfiltered, has at least one non-ref genotype, and
     * at least LOW_GQ_PCT of its genotypes are called with quality >= LOW_GQ_THRSH.
     */
    private boolean isGood(VariantContext vc) {
        if ( vc == null || vc.isFiltered() || (vc.getHetCount() + vc.getHomVarCount() == 0) ) { // todo -- should be is variant, but need to ensure no alt alleles at ref sites
            return false;
        } else {
            Collection<Genotype> gtypes = vc.getGenotypes().values();
            int ngood = 0;
            for ( Genotype g : gtypes) {
                if ( g.isCalled() && g.getPhredScaledQual() >= LOW_GQ_THRSH ) {
                    ngood ++;
                }
            }
            return ( (0.0+ngood)/(0.0+gtypes.size()) >= LOW_GQ_PCT );
        }
    }

    public ACTransitionTable(VariantEvalWalker parent) {
        //super(parent);
    }

    /**
     * Builds NUM_PERMUTATIONS orderings of the sample set (permutation 0 is the set's iteration
     * order; the rest are Fisher-Yates shuffles of it) and allocates the count tables with
     * 2 * #samples AC bins (hom-var can add two per sample).
     */
    public void initialize(VariantContext vc) {
        Set<String> permuteSamples = vc.getSampleNames();
        permutations = new String[NUM_PERMUTATIONS][permuteSamples.size()];
        //veWalker.getLogger().warn(String.format("Num samples: %d",permuteSamples.size()));
        int offset = 0;
        for ( String s : permuteSamples ) {
            permutations[0][offset] = s;
            offset ++;
        }
        for ( int p = 1; p < NUM_PERMUTATIONS ; p++ ) {
            permutations[p] = permutations[0].clone();
            // in-place shuffle of the cloned base ordering
            for ( int o = 0; o < permutations[p].length; o ++ ) {
                int r = (int) Math.floor(Math.random()*(o+1));
                String swap = permutations[p][r];
                permutations[p][r] = permutations[p][o];
                permutations[p][o] = swap;
            }
        }
        transitions = new TransitionTable();
        // dimensions: [permutation][AC bin, up to 2 per sample][sample position]
        transitions.hetTransitionCounts = new int[NUM_PERMUTATIONS][permuteSamples.size()*2][permuteSamples.size()];
        transitions.homTransitionCounts = new int[NUM_PERMUTATIONS][permuteSamples.size()*2][permuteSamples.size()];
        transitions.stationaryCounts = new int[NUM_PERMUTATIONS][permuteSamples.size()*2][permuteSamples.size()];
        privatePermutations = new PermutationCounts(1,transitions);
        doubletonPermutations = new PermutationCounts(2,transitions);
        tripletonPermutations = new PermutationCounts(3,transitions);
    }

    public void finalizeEvaluation() { // note: data points are null when this is called (wtf?)
        //veWalker.getLogger().info(String.format("Skipped: %d",skipped));
    }

    /**
     * Table of transition proportions derived from the raw counts: for each AC bin and sample
     * position, the fraction of observations in which adding that sample raised AC by one (het
     * rows), by two (hom rows), or left it unchanged (ref rows). Rows are AC_i_(het|hom|ref);
     * columns are sample positions.
     */
    class TransitionTable implements TableType {
        int[][][] hetTransitionCounts;
        int[][][] homTransitionCounts;
        int[][][] stationaryCounts;
        // Lazily computed formatted proportions; 3 stacked row blocks (het, hom, ref).
        String[][] countAverages;
        String[] rowKeys = null;
        String[] colKeys = null;

        public Object[] getRowKeys() {
            if ( rowKeys == null ) {
                rowKeys = new String[3*hetTransitionCounts[0].length];
                for ( int i = 0; i < hetTransitionCounts[0].length; i ++ ) {
                    rowKeys[i] = String.format("%s%d%s","AC_",i,"_(het)");
                }
                for ( int i = 0; i < hetTransitionCounts[0].length; i ++ ) {
                    rowKeys[hetTransitionCounts[0].length+i] = String.format("%s%d%s","AC_",i,"_(hom)");
                }
                for ( int i = 0; i < hetTransitionCounts[0].length; i ++ ) {
                    rowKeys[2*hetTransitionCounts[0].length+i] = String.format("%s%d%s","AC_",i,"_(ref)");
                }
            }
            return rowKeys;
        }

        /** Lazily fills countAverages on first access, then serves formatted cells. */
        public String getCell(int x, int y) {
            if ( countAverages == null ) {
                countAverages = new String[hetTransitionCounts[0].length*3][hetTransitionCounts[0][0].length];
                for ( int sam = 0; sam < hetTransitionCounts[0][0].length; sam ++) {
                    for ( int idx = 0 ; idx < hetTransitionCounts[0].length; idx ++ ) {
                        int totalTimesAtACSample = 0;
                        int totalStationary = 0;
                        int totalAC1Shift = 0;
                        int totalAC2Shift = 0;
                        // Aggregate across permutations; idx+1/idx+2 read the counts recorded at
                        // the AC bin *reached* by a het/hom transition out of bin idx.
                        // NOTE(review): the range guards compare against [0][0].length (the sample
                        // dimension) rather than [0].length (the AC dimension) — verify intent.
                        for ( int p = 0; p < hetTransitionCounts.length; p++ ) {
                            totalStationary += stationaryCounts[p][idx][sam];
                            totalAC2Shift += (idx+2 >= hetTransitionCounts[0][0].length) ? 0 : homTransitionCounts[p][idx+2][sam];
                            totalAC1Shift += (idx+1 >= hetTransitionCounts[0][0].length) ? 0 : hetTransitionCounts[p][idx+1][sam];
                        }
                        totalTimesAtACSample = totalStationary+totalAC1Shift+totalAC2Shift;
                        countAverages[idx][sam] = formatProp(totalAC1Shift,totalTimesAtACSample);
                        countAverages[hetTransitionCounts[0].length+idx][sam] = formatProp(totalAC2Shift,totalTimesAtACSample);
                        countAverages[hetTransitionCounts[0].length*2+idx][sam] = formatProp(totalStationary,totalTimesAtACSample);
                    }
                }
            }
            return countAverages[x][y] == null ? "0.00" : countAverages[x][y];
        }

        // Formats num/denom to four decimal places; "0.0" when the denominator is zero.
        private String formatProp(int num, int denom) {
            return (denom != 0) ? String.format("%.4f", ((double) num)/denom) : "0.0";
        }

        public String getName() { return "AC Transition Tables"; }

        public Object[] getColumnKeys() {
            if ( colKeys == null ) {
                colKeys = new String[hetTransitionCounts[0][0].length];
                for ( int ac = 0; ac < hetTransitionCounts[0][0].length; ac ++ ) {
                    colKeys[ac] = String.format("Sample_%d",ac);
                }
            }
            return colKeys;
        }
    }

    /**
     * Per-permutation counts of sites reaching a fixed AC (acToExtract) at each sample position,
     * read out of a backing TransitionTable. Rows are permutations; columns sample positions.
     */
    class PermutationCounts implements TableType {
        int acToExtract;
        TransitionTable table;
        String[] rowNames;
        String[] colNames;

        public PermutationCounts(int ac, TransitionTable tTable) {
            acToExtract = ac;
            table = tTable;
        }

        public String[] getRowKeys() {
            //System.out.printf("%s%n",table);
            if ( rowNames == null ) {
                rowNames = new String[table.stationaryCounts.length];
                for ( int p = 0 ; p < rowNames.length; p ++ ) {
                    rowNames[p] = String.format("Perm%d",p+1);
                }
            }
            return rowNames;
        }

        public String[] getColumnKeys() {
            if ( colNames == null ) {
                colNames = new String[table.stationaryCounts[0][0].length];
                for ( int s = 0 ; s < colNames.length; s ++ ) {
                    colNames[s] = String.format("Sample%d",s+1);
                }
            }
            return colNames;
        }

        // Het transitions into bin acToExtract, plus hom transitions when in range.
        // NOTE(review): the guard compares acToExtract against the sample dimension
        // ([0][0].length) rather than the AC dimension — verify intent before reuse.
        public Integer getCell(int x, int y) {
            return table.hetTransitionCounts[x][acToExtract-1][y] +
                ( (acToExtract > table.homTransitionCounts[0][0].length) ? 0 : table.homTransitionCounts[x][acToExtract-1][y]);
        }

        public String getName() {
            return String.format("PermutationCountsAC%d",acToExtract);
        }

        // Touches keys and a cell to force lazy initialization.
        public void init() {
            getRowKeys();
            getColumnKeys();
            getCell(1,1);
        }
    }
}

View File

@ -1,212 +0,0 @@
package org.broadinstitute.sting.oneoffprojects.walkers.varianteval;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broad.tribble.vcf.VCFConstants;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.evaluators.VariantEvaluator;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.report.tags.DataPoint;
import org.broadinstitute.sting.utils.report.utils.TableType;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
*/
/**
 * VariantEval module that builds two concordance tables between matched eval and comp variant
 * contexts: allele frequency (AF, binned to 0.01) and allele count (AC, clamped at
 * MAX_AC_COUNT). AC/AF are recomputed from the genotypes at each site rather than read from the
 * info field, since those annotations may be absent or stale (see missingField).
 */
@Analysis(name = "Allele Frequency Comparison", description = "Compare allele frequency and counts between eval and comp")
public class AlleleFrequencyComparison extends VariantEvaluator {
    /** Allele counts above this value are clamped into the top bin of the AC table. */
    private static final int MAX_AC_COUNT = 100; // todo -- command line argument?

    @DataPoint(description="Counts of eval frequency versus comp frequency")
    AFTable afTable = new AFTable();

    @DataPoint(description="Counts of eval AC versus comp AC")
    ACTable acTable = new ACTable(MAX_AC_COUNT);

    public boolean enabled() { return true; }

    public int getComparisonOrder() { return 2; }

    public String getName() { return "Allele Frequency Comparison"; }

    public AlleleFrequencyComparison(VariantEvalWalker parent) {
        //super(parent);
    }

    /**
     * Tabulates one (eval, comp) site pair. Both contexts must be valid (non-null, unfiltered,
     * biallelic) or the site is skipped. Always returns null (no interesting sites reported).
     */
    public String update2(VariantContext eval, VariantContext comp, RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
        if ( ! (isValidVC(eval) && isValidVC(comp)) ) {
            return null;
        }
        // Recompute AC/AF directly from the genotypes for both contexts; the info-field
        // annotations cannot be relied upon to be present (see missingField).
        HashMap<String,Object> evalCounts = new HashMap<String,Object>(2);
        HashMap<String,Object> compCounts = new HashMap<String,Object>(2);
        VariantContextUtils.calculateChromosomeCounts(eval, evalCounts, false);
        VariantContextUtils.calculateChromosomeCounts(comp, compCounts, false);
        afTable.update(((List<Double>) evalCounts.get("AF")).get(0), ((List<Double>) compCounts.get("AF")).get(0));
        acTable.update(((List<Integer>) evalCounts.get("AC")).get(0), ((List<Integer>) compCounts.get("AC")).get(0));
        return null; // there is nothing interesting
    }

    /** True when the context lacks either the AC or the AF info-field annotation. */
    private static boolean missingField(final VariantContext vc) {
        return ! ( vc.hasAttribute(VCFConstants.ALLELE_COUNT_KEY) && vc.hasAttribute(VCFConstants.ALLELE_FREQUENCY_KEY) );
    }

    // NOTE(review): currently unused. VariantContext.modifyAttributes returns a new context,
    // so reassigning the parameter here never propagates to the caller.
    private void recalculateCounts(VariantContext vc) {
        Map<String,Object> attributes = new HashMap<String,Object>();
        VariantContextUtils.calculateChromosomeCounts(vc, attributes, false);
        vc = VariantContext.modifyAttributes(vc, attributes);
        if ( attributes.size() == 2 && missingField(vc) ) {
            throw new org.broadinstitute.sting.utils.exceptions.StingException("VariantContext should have had attributes modified but did not");
        }
    }

    /** Usable sites are non-null, unfiltered, and biallelic. */
    private static boolean isValidVC(final VariantContext vc) {
        return (vc != null && !vc.isFiltered() && vc.getAlternateAlleles().size() == 1);
    }

    /**
     * Extracts the allele frequency from the AF attribute, tolerating List, bracketed-String,
     * plain-String, and Double representations. Returns 0.0 when the attribute is absent.
     * @throws UserException if the attribute cannot be parsed as a number
     */
    private static double getAF(VariantContext vc) {
        Object af = vc.getAttribute(VCFConstants.ALLELE_FREQUENCY_KEY);
        if ( af == null ) {
            // still none after being re-computed; this is 0.00
            return 0.00;
        } else if ( List.class.isAssignableFrom(af.getClass()) ) {
            return ( (List<Double>) af ).get(0);
        } else if ( String.class.isAssignableFrom(af.getClass()) ) {
            String s = (String) af;
            try {
                // Bug fix: String.replace matches literal text, not a regex, so the previous
                // replace("\\[","") never stripped the brackets and "[x]" failed to parse.
                return Double.parseDouble(s.startsWith("[") ? s.replace("[", "").replace("]", "") : s);
            } catch (NumberFormatException e) {
                throw new UserException("Allele frequency field may be improperly formatted, found AF="+s,e);
            }
        } else if ( Double.class.isAssignableFrom(af.getClass()) ) {
            return (Double) af;
        } else {
            throw new UserException(String.format("Class of Allele Frequency does not appear to be formated, had AF=%s, of class %s",af.toString(),af.getClass()));
        }
    }

    /**
     * Extracts the allele count from the AC attribute; same representations as getAF.
     * Returns 0 when the attribute is absent.
     * @throws UserException if the attribute cannot be parsed as an integer
     */
    private static int getAC(VariantContext vc) {
        Object ac = vc.getAttribute(VCFConstants.ALLELE_COUNT_KEY);
        if ( ac == null ) {
            // still none after being re computed; this is 0
            return 0;
        } else if ( List.class.isAssignableFrom(ac.getClass()) ) {
            return ( (List<Integer>) ac ).get(0);
        } else if ( String.class.isAssignableFrom(ac.getClass()) ) {
            String s = (String) ac;
            try {
                // Bug fix: strip literal brackets (String.replace is not regex-based).
                return Integer.parseInt(s.startsWith("[") ? s.replace("[", "").replace("]", "") : s);
            } catch (NumberFormatException e) {
                throw new UserException(String.format("Allele count field may be improperly formatted, found AC=%s for record %s:%d",ac,vc.getChr(),vc.getStart()),e);
            }
        } else if ( Integer.class.isAssignableFrom(ac.getClass()) ) {
            return (Integer) ac;
        } else {
            // Bug fix: message previously said "Allele Frequency"/"AF" for the AC field.
            throw new UserException(String.format("Class of Allele Count does not appear to be formated, had AC=%s, of class %s",ac.toString(),ac.getClass()));
        }
    }
}
/**
 * Symmetric 101x101 contingency table of eval-vs-comp allele frequencies, with each
 * frequency binned to the nearest 0.01 (bins 0.00 through 1.00).
 */
class AFTable implements TableType {
    protected int[][] afCounts = new int[101][101];

    /** Row labels are the bin centers "0.00" .. "1.00". */
    public Object[] getRowKeys() {
        String[] labels = new String[101];
        for (int bin = 0; bin < 101; bin++) {
            labels[bin] = String.format("%.2f", bin / 100.0);
        }
        return labels;
    }

    /** Symmetric table: column labels match the row labels. */
    public Object[] getColumnKeys() {
        return getRowKeys();
    }

    public Object getCell(int i, int j) {
        return afCounts[i][j];
    }

    public String getName() {
        return "Allele Frequency Concordance";
    }

    /** Record one (eval, comp) frequency pair. */
    public void update(double eval, double comp) {
        int row = af2index(eval);
        int col = af2index(comp);
        afCounts[row][col]++;
    }

    /** Map a frequency in [0,1] to its 0.01-wide bin index. */
    private int af2index(double d) {
        return (int) Math.round(d * 100);
    }
}
/**
 * Symmetric contingency table of eval-vs-comp allele counts; counts larger than the
 * configured maximum are clamped into the top bin.
 */
class ACTable implements TableType {
    protected int[][] acCounts;
    protected int maxAC;

    public ACTable(int acMaximum) {
        maxAC = acMaximum;
        acCounts = new int[acMaximum + 1][acMaximum + 1];
    }

    /** Row labels are the integer counts "0" .. maxAC. */
    public Object[] getRowKeys() {
        String[] labels = new String[maxAC + 1];
        for (int ac = 0; ac <= maxAC; ac++) {
            labels[ac] = Integer.toString(ac);
        }
        return labels;
    }

    /** Symmetric table: columns share the row labels. */
    public Object[] getColumnKeys() {
        return getRowKeys();
    }

    public Object getCell(int i, int j) {
        return acCounts[i][j];
    }

    public String getName() {
        return "Allele Counts Concordance";
    }

    /** Record one (eval, comp) count pair, clamping each to maxAC. */
    public void update(int eval, int comp) {
        acCounts[Math.min(eval, maxAC)][Math.min(comp, maxAC)]++;
    }
}

View File

@ -1,219 +0,0 @@
package org.broadinstitute.sting.oneoffprojects.walkers.varianteval;
import org.broad.tribble.util.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContextUtils;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.evaluators.VariantEvaluator;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.utils.analysis.AminoAcid;
import org.broadinstitute.sting.utils.analysis.AminoAcidTable;
import org.broadinstitute.sting.utils.analysis.AminoAcidUtils;
import org.broadinstitute.sting.utils.exceptions.UserException;
/*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* @author chartl
* @since June 28, 2010
*/
/**
 * VariantEval module that builds a square (reference AA x alternate AA) table of
 * transition/transversion tallies, parsed from a configurable info-field annotation on each
 * eval variant context.
 */
@Analysis(name = "Amino Acid Transition", description = "Calculates the Transition Matrix for coding variants; entries are Total, Num. Ti, Num. Tv, Ratio")
public class AminoAcidTransition extends VariantEvaluator {
    ////////////////////////////////////////////////////////////
    //// INTERNAL DATA POINT CLASSES
    ////////////////////////////////////////////////////////////
    // a mapping from amino acid transition score histogram bin to Ti/Tv ratio
    @DataPoint(description = "TiTv counts by amino acid change")
    AminoAcidTiTvTable acidTable = null;

    /** Mutable pair of transition/transversion tallies for one amino-acid change. */
    class TiTvCount {
        public int ti;
        public int tv;
        public TiTvCount() {
            ti = 0;
            tv = 0;
        }
        public int getTotal() {
            return ti + tv;
        }
        // NOTE(review): denominator is 1.0+tv rather than tv, presumably to avoid division
        // by zero — so this is not the literal ti/tv ratio. Confirm before relying on it.
        public double getRatio() {
            return ( (double) ti )/(1.0+tv);
        }
        // Formatted as total:ti:tv:ratio.
        public String toString() {
            return String.format("%d:%d:%d:%.2f",getTotal(),ti,tv,getRatio());
        }
    }

    /** Square table (ref AA x alt AA) of TiTvCount cells, keyed by amino-acid codes. */
    class AminoAcidTiTvTable implements TableType {
        private TiTvCount[][] countsByAAChange;
        public AminoAcidTiTvTable() {
            countsByAAChange = new TiTvCount[AminoAcid.values().length][AminoAcid.values().length];
            for ( int i = 0; i < AminoAcid.values().length; i ++ ) {
                for ( int j = 0; j < AminoAcid.values().length; j++ ) {
                    countsByAAChange[i][j] = new TiTvCount();
                }
            }
        }
        public Object[] getRowKeys() {
            return AminoAcidUtils.getAminoAcidCodes();
        }
        public Object[] getColumnKeys() {
            return AminoAcidUtils.getAminoAcidCodes();
        }
        public TiTvCount getCell(int x, int y) {
            return countsByAAChange[x][y];
        }
        public String getName() {
            return "AminoAcidTransitionTable";
        }
        // Increment the ti or tv tally for the (reference -> alternate) change.
        public void update(AminoAcid reference, AminoAcid alternate, boolean isTransition) {
            TiTvCount counter = countsByAAChange[reference.ordinal()][alternate.ordinal()];
            if ( isTransition ) {
                counter.ti++;
            } else {
                counter.tv++;
            }
        }
    }

    ////////////////////////////////////////////////////////////
    //// CORE VARIANT EVALUATOR DATA AND METHODS
    ////////////////////////////////////////////////////////////
    // Info-field key holding the amino-acid change annotation (intended to come from -aatk).
    private String infoKey;
    // Delimiter splitting the info value into (reference, alternate) parts (-aats).
    private String infoValueSplit;
    // When true, info values are codons resolved via getEukaryoticAA; otherwise AA codes.
    private boolean useCodons;
    private boolean enabled;
    private AminoAcidTable lookup;

    public AminoAcidTransition(VariantEvalWalker parent) {
        //super(parent);
        //enabled = parent.aminoAcidTransitionKey != null;
        enabled = true;
        if ( enabled ) {
            getParsingInformation(parent);
            lookup = new AminoAcidTable();
            acidTable = new AminoAcidTiTvTable();
        }
    }

    // NOTE(review): the walker-sourced settings are commented out and replaced with nulls,
    // so when enabled this always throws CommandLineException — looks like in-progress
    // wiring; confirm before reuse.
    private void getParsingInformation(VariantEvalWalker parent) {
        if ( enabled() ) {
            // infoKey = parent.aminoAcidTransitionKey;
            // infoValueSplit = parent.aminoAcidTransitionSplit;
            // useCodons = parent.aatUseCodons;
            infoKey = null;
            infoValueSplit = null;
            useCodons = false;
            if ( infoKey == null ) {
                throw new UserException.CommandLineException("No info-field key provided for amino acid tabulation. Please provide the appropriate key with -aatk.");
            }
            if ( infoValueSplit == null ) {
                throw new UserException.CommandLineException("No split string provided for amino acid tabulation. Please provide the split string with -aats");
            }
        }
    }

    public String getName() {
        return "AminoAcidTransitionTable";
    }

    public int getComparisonOrder() {
        return 1; // we only need to see each eval track
    }

    public boolean enabled() {
        return enabled;
    }

    public String toString() {
        return getName();
    }

    /**
     * Parses the (reference, alternate) amino-acid annotation from the eval context's info
     * field and updates the Ti/Tv table. Returns a short reason string when a codon/code
     * fails to resolve; otherwise null.
     */
    public String update1(VariantContext eval, RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
        String interesting = null;
        //if ( eval != null && eval.hasAttribute(infoKey) ) {
        if ( enabled && eval != null && eval.hasAttribute(infoKey) ) {
            String[] parsedNames = ( (String) eval.getAttribute(infoKey)).split(infoValueSplit);
            String first = "none";
            String second = "none";
            try {
                first = parsedNames [0];
                second = parsedNames [1];
            } catch (ArrayIndexOutOfBoundsException e) {
                //getLogger().warn("Error parsing variant context with value "+eval.getAttribute(infoKey));
            }
            AminoAcid reference;
            AminoAcid alternate;
            if ( useCodons ) {
                reference = lookup.getEukaryoticAA(first);
                alternate = lookup.getEukaryoticAA(second);
            } else {
                reference = lookup.getAminoAcidByCode(first);
                alternate = lookup.getAminoAcidByCode(second);
            }
            //veWalker.getLogger().info(String.format("%s\t%s\t%s\t%s",first,second,reference,alternate));
            if ( reference == null ) {
                interesting = "Unknown Reference Codon";
            } else if ( alternate == null ) {
                interesting = "Unknown Alternate Codon";
            } else {
                acidTable.update(reference,alternate, VariantContextUtils.isTransition(eval));
            }
        }
        return interesting; // This module doesn't capture any interesting sites, so return null
    }
    //public void finalizeEvaluation() {
    //
    //}
}

View File

@ -9,9 +9,8 @@ import org.broadinstitute.sting.gatk.walkers.varianteval.VariantEvalWalker;
import org.broadinstitute.sting.gatk.walkers.varianteval.evaluators.VariantEvaluator;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.Analysis;
import org.broadinstitute.sting.gatk.walkers.varianteval.tags.DataPoint;
import org.broadinstitute.sting.utils.report.utils.TableType;
import org.broadinstitute.sting.gatk.walkers.varianteval.util.TableType;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;

View File

@ -244,4 +244,24 @@ public class UserException extends ReviewedStingException {
super(String.format("Unable to execute QScript: " + message), e);
}
}
/**
 * Thrown when a reference index/dictionary file is missing and could not be created.
 */
public static class CouldNotCreateReferenceIndexFile extends UserException {
    public CouldNotCreateReferenceIndexFile(File f, Exception e) {
        this(f, "", e);
    }
    public CouldNotCreateReferenceIndexFile(File f, String message, Exception e) {
        // Bug fix: pass e through as the cause (two-arg super, as used by the other nested
        // exceptions in this class) so the original stack trace is preserved instead of
        // keeping only e.getMessage(). A null cause is permitted by Throwable.
        super(String.format("Index file %s does not exist but could not be created because: %s. ", f, message)
                + (e == null ? "" : e.getMessage()), e);
    }
}
/**
 * Thrown when the reference index/dictionary could not be created because an exclusive
 * file lock could not be obtained (typically another GATK process is writing it).
 * No cause is attached (null) since there is no underlying exception in the lock case.
 */
// NOTE(review): the parent prefixes this text with "...could not be created because:", so the
// combined message reads slightly redundantly; confirm wording if user-facing polish matters.
public static class CouldNotCreateReferenceIndexFileBecauseOfLock extends UserException.CouldNotCreateReferenceIndexFile {
public CouldNotCreateReferenceIndexFileBecauseOfLock(File f) {
super(f, "could not be written because an exclusive file lock could not be obtained. " +
"If you are running multiple instances of GATK, another GATK process is " +
"probably creating this file now, and has locked it. Please wait until this process finishes " +
"and try again.", null);
}
}
}

View File

@ -1,127 +0,0 @@
/*
* Copyright (c) 2010. The Broad Institute
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.report;
import org.broadinstitute.sting.utils.report.tags.Analysis;
import org.broadinstitute.sting.utils.report.tags.DataPoint;
import org.broadinstitute.sting.utils.report.tags.Param;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* @author aaron
* <p/>
* Class AnalysisModuleScanner
* <p/>
* Given an analysis, find the annotated fields and methods. Given this module and
* the object, a Mashalling object can serialize or deserialize a analysis module.
*/
public class AnalysisModuleScanner {
    // scan results: annotated fields, in declaration order
    private final Map<Field, Param> parameters = new LinkedHashMap<Field, Param>();     // the @Param-annotated fields
    private final Map<Field, DataPoint> datums = new LinkedHashMap<Field, DataPoint>(); // the @DataPoint-annotated fields
    private Analysis analysis;  // the class-level @Analysis annotation
    // the class being scanned
    private final Class cls;

    /**
     * Create a scanner for the given class.
     * @param cls the target class; must carry the @Analysis annotation
     */
    public AnalysisModuleScanner(Class cls) {
        this.cls = cls;
        scan();
    }

    /**
     * Create a scanner for the runtime class of the given object.
     * @param obj the target object; its class must carry the @Analysis annotation
     */
    public AnalysisModuleScanner(Object obj) {
        this(obj.getClass());
    }

    /**
     * Scan the class for the @Analysis annotation and its annotated fields.
     * @throws ReviewedStingException if the class is null or lacks @Analysis
     */
    public void scan() {
        if (cls == null || !cls.isAnnotationPresent(Analysis.class))
            throw new ReviewedStingException("The class passed in cannot be null, "
                    + "and must contain the @Analysis annotation, class " + cls + " was the input");
        // pull the class-level annotation, then collect the annotated fields
        analysis = (Analysis) cls.getAnnotation(Analysis.class);
        scanFields();
    }

    /**
     * Collect @Param and @DataPoint fields, walking the class hierarchy so
     * inherited fields are included.
     */
    private void scanFields() {
        for (Class current = cls; current != null; current = current.getSuperclass()) {
            for (Field field : current.getDeclaredFields()) {
                Param param = field.getAnnotation(Param.class);
                if (param != null)
                    parameters.put(field, param);
                DataPoint dataPoint = field.getAnnotation(DataPoint.class);
                if (dataPoint != null)
                    datums.put(field, dataPoint);
            }
        }
    }

    /** @return the @Param-annotated fields we found */
    public Map<Field, Param> getParameters() {
        return parameters;
    }

    /** @return the @DataPoint-annotated fields we found */
    public Map<Field, DataPoint> getData() {
        return datums;
    }

    /** @return the class-level @Analysis annotation */
    public Analysis getAnalysis() {
        return analysis;
    }

    /** @return the class this scanner was built for */
    public Class getModuleClass() {
        return cls;
    }
}

View File

@ -1,203 +0,0 @@
/*
* Copyright (c) 2010. The Broad Institute
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.report;
import org.broadinstitute.sting.utils.report.templates.ReportFormat;
import org.broadinstitute.sting.utils.report.utils.ComplexDataUtils;
import org.broadinstitute.sting.utils.report.utils.Node;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import java.io.*;
import java.lang.reflect.Field;
import java.text.DateFormat;
import java.util.*;
/**
* @author aaron
* <p/>
* Class ReportMarshaller
* <p/>
* marshall report data out of the GATK.
*/
public class ReportMarshaller {
    // the report format used to render the output
    private ReportFormat temp;
    // the aggregation of all our analyses
    private Node root;
    // exactly one of the following is non-null, depending on the constructor used
    private File outputFileLocation;
    private Writer outputWriter;

    /**
     * create a marshaller that writes to a file
     *
     * @param reportName the report name
     * @param filename   the file to write the report to
     * @param template   the template (format) to use
     * @param reportTags tag nodes appended to the report root node
     */
    public ReportMarshaller(String reportName, File filename, ReportFormat template, List<Node> reportTags) {
        temp = template;
        this.outputFileLocation = filename;
        createRootNode(reportName, reportTags);
    }

    /**
     * create a marshaller that writes to an arbitrary writer
     *
     * @param reportName the report name
     * @param writer     the writer to emit the report to
     * @param template   the template (format) to use
     * @param reportTags tag nodes appended to the report root node
     */
    public ReportMarshaller(String reportName, Writer writer, ReportFormat template, List<Node> reportTags) {
        this.outputWriter = writer;
        temp = template;
        createRootNode(reportName, reportTags);
    }

    /**
     * create the root node of the report tree
     * @param reportName the report name
     * @param reportTags tag nodes to attach to the root
     */
    private void createRootNode(String reportName, List<Node> reportTags) {
        root = new Node("report", reportName, DateFormat.getDateTimeInstance().format(new Date()));
        root.addChild(new Node("title", reportName, "title of the report"));
        for (Node n : reportTags) {
            n.setTag();
            root.addChild(n);
        }
    }

    /**
     * add an analysis module to the output source
     *
     * @param toMarshall the object to marshall; its class must carry @Analysis
     */
    public void write(Object toMarshall) {
        AnalysisModuleScanner moduleScanner = new AnalysisModuleScanner(toMarshall);
        Node analysis = addAnalysis(moduleScanner);
        analysis.addAllChildren(getParameterNodes(toMarshall, moduleScanner));
        analysis.addAllChildren(getDataPointNodes(toMarshall, moduleScanner));
        // add this analysis to the root node
        root.addChild(analysis);
    }

    /**
     * add an analysis module to the output source, prepending the given tag nodes
     *
     * @param tags       tag nodes to prepend to the analysis node
     * @param toMarshall the object to marshall; its class must carry @Analysis
     */
    public void write(List<Node> tags, Object toMarshall) {
        AnalysisModuleScanner moduleScanner = new AnalysisModuleScanner(toMarshall);
        Node analysis = addAnalysis(moduleScanner);
        // prepend the list of nodes passed in
        Node currChild = analysis;
        for (Node n : tags) {
            n.setTag();
            currChild.addChild(n);
        }
        root.addChild(analysis);
        // NOTE(review): data points are added before parameters here, the reverse
        // of write(Object) — confirm whether this ordering difference is intentional
        currChild.addAllChildren(getDataPointNodes(toMarshall, moduleScanner));
        currChild.addAllChildren(getParameterNodes(toMarshall, moduleScanner));
    }

    /** build the analysis node from the scanner's @Analysis annotation */
    private Node addAnalysis(AnalysisModuleScanner moduleScanner) {
        return new Node("analysis", moduleScanner.getAnalysis().name(), moduleScanner.getAnalysis().description());
    }

    /**
     * collect the Params objects annotated on the target object
     *
     * @param toMarshall the object to output
     * @param moduleScanner our scanner, which stores the annotated field information
     * @return a collection of parameter nodes, one per @Param field
     */
    private Collection<Node> getParameterNodes(Object toMarshall, AnalysisModuleScanner moduleScanner) {
        Collection<Node> nodes = new ArrayList<Node>();
        for (Field f : moduleScanner.getParameters().keySet()) {
            // fall back to the field name when the annotation gives no name
            Node node = new Node("parameter",
                    moduleScanner.getParameters().get(f).name().equals("") ? f.getName() : moduleScanner.getParameters().get(f).name(),
                    moduleScanner.getParameters().get(f).description());
            addChildNodeFromField(toMarshall, f, node);
            nodes.add(node);
        }
        return nodes;
    }

    /**
     * collect the DataPoint objects annotated on the target object
     *
     * @param toMarshall the object to output
     * @param moduleScanner our scanner, which stores the annotated field information
     * @return a collection of data-point nodes, one per @DataPoint field
     */
    private Collection<Node> getDataPointNodes(Object toMarshall, AnalysisModuleScanner moduleScanner) {
        Collection<Node> nodes = new ArrayList<Node>();
        for (Field f : moduleScanner.getData().keySet()) {
            // fall back to the field name when the annotation gives no name
            Node node = new Node("data_point",
                    moduleScanner.getData().get(f).name().equals("") ? f.getName() : moduleScanner.getData().get(f).name(),
                    moduleScanner.getData().get(f).description());
            addChildNodeFromField(toMarshall, f, node);
            nodes.add(node);
        }
        return nodes;
    }

    /**
     * finalize the report: render the tree to the file or writer and close the format
     */
    public void close() {
        if (outputFileLocation != null) temp.write(outputFileLocation, root);
        else temp.write(outputWriter, root);
        temp.close();
    }

    /**
     * helper method for adding a Node to the specified node, given the field
     *
     * @param toMarshall the object which contains the specified field
     * @param f the field
     * @param node the node to add a child node to
     */
    private static void addChildNodeFromField(Object toMarshall, Field f, Node node) {
        f.setAccessible(true);
        try {
            Collection<Node> nodes = ComplexDataUtils.resolveObjects(f.get(toMarshall));
            // we want to eliminate any data nodes that are there just to incorporate an underlying table
            if (nodes.size() == 1 && nodes.iterator().next().table)
                node.clone(nodes.iterator().next());
            else
                node.addAllChildren(nodes);
        } catch (IllegalAccessException e) {
            // chain the cause so the original failure is not lost
            throw new ReviewedStingException("Unable to access field " + f, e);
        }
    }
}

View File

@ -1,91 +0,0 @@
package org.broadinstitute.sting.utils.report;
import org.broadinstitute.sting.utils.report.templates.*;
import org.broadinstitute.sting.utils.report.utils.Node;
import org.broadinstitute.sting.utils.exceptions.DynamicClassResolutionException;
import java.io.*;
import java.util.List;
/**
*
* @author aaron
*
* Class VE2ReportFactory
*
* create ReportMarshaller from writers and template types
*/
public class VE2ReportFactory {
    // where templates are stored
    public static final String ve2templateDir = "templates/";
    // our default output type
    public static final VE2TemplateType defaultReportFormat = VE2TemplateType.Table;
    /** the types of templates we're aware of for VariantEval2 */
    public enum VE2TemplateType {
        Table(TableFormat.class),
        Grep(GrepFormat.class),
        CSV(CSVFormat.class),
        R(RFormat.class);
        // NOTE(review): public mutable field — callers could reassign it; consider final
        public Class underlyingReportType;
        VE2TemplateType(Class<? extends ReportFormat> type) {
            underlyingReportType = type;
        }
    }
    /**
     * create a report ReportMarshaller from a file, type, and any report tags.
     * NOTE(review): method name is misspelled ("Marhsaller") but is part of the
     * public API — kept as-is for caller compatibility.
     * @param writeTo the output location
     * @param type the VE2TemplateType type
     * @param reportTags the tags to append to each report root node
     * @return a ReportMarshaller writing to the given file
     * @throws IllegalArgumentException if the format cannot write to a file
     */
    public static ReportMarshaller createMarhsaller(File writeTo,VE2TemplateType type, List<Node> reportTags) {
        if (!isCompatibleWithOutputType(ReportFormat.AcceptableOutputType.FILE,type))
            throw new IllegalArgumentException("Report format " + type + " does not support an output parameter of type " + ReportFormat.AcceptableOutputType.FILE);
        return new ReportMarshaller("Variant Eval 2 Report",writeTo,createByType(type.underlyingReportType),reportTags);
    }
    /**
     * create a report ReportMarshaller from a writer, type, and any report tags
     * (see spelling note on the File overload above)
     *
     * @param writer the output object
     * @param type the VE2TemplateType type
     * @param reportTags the tags to append to each report root node
     *
     * @return a ReportMarshaller writing to the given writer
     * @throws IllegalArgumentException if the format cannot write to a stream
     */
    public static ReportMarshaller createMarhsaller(Writer writer, VE2TemplateType type, List<Node> reportTags) {
        if (!isCompatibleWithOutputType(ReportFormat.AcceptableOutputType.STREAM,type))
            throw new IllegalArgumentException("Report format " + type + " does not support an output parameter of type " + ReportFormat.AcceptableOutputType.STREAM);
        return new ReportMarshaller("Variant Eval 2 Report",writer,createByType(type.underlyingReportType),reportTags);
    }
    /**
     * check whether the given report format supports the proposed output type.
     * NOTE(review): instantiates a throwaway format object just to query its
     * capabilities — cheap, but could be cached if this is ever hot.
     * @param output the output type we're proposing
     * @param type the report format we'd like to use
     * @return true if the format accepts the output type
     */
    public static boolean isCompatibleWithOutputType( ReportFormat.AcceptableOutputType output, VE2TemplateType type) {
        ReportFormat format = createByType(type.underlyingReportType);
        return (format.getAcceptableOutputTypes().contains(output));
    }
    /**
     * create a report formatter with the given type via its no-arg constructor.
     *
     * @param formatType type of the reporter to create; must implement ReportFormat
     *                   (the cast below is unchecked — a wrong class surfaces at runtime)
     *
     * @return The reporter object if created
     * @throws DynamicClassResolutionException if instantiation fails
     */
    public static ReportFormat createByType(Class formatType) {
        try {
            return ((Class<? extends ReportFormat>) formatType).newInstance();
        } catch (Exception e) {
            throw new DynamicClassResolutionException(formatType, e);
        }
    }
}

View File

@ -1,18 +0,0 @@
package org.broadinstitute.sting.utils.report.tags;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* @author aaron
* <p/>
* Annotation Analysis
* <p/>
* the main annotation for analysis objects in the report system
*/
// retained at runtime so AnalysisModuleScanner can read it reflectively
@Retention(RetentionPolicy.RUNTIME)
public @interface Analysis {
    String name() default ""; // the name of the analysis; optional (empty by default)
    String description(); // its description, required
    String version() default ""; // the version, not always used
}

View File

@ -1,41 +0,0 @@
/*
* Copyright (c) 2010. The Broad Institute
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.report.tags;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* @author aaron
* <p/>
* Annotation DataPoint
* <p/>
* The basic DataPoint annotation, for fields in an analysis that
* are to be output as data.
*/
// retained at runtime so the reporting machinery can read it reflectively
@Retention(RetentionPolicy.RUNTIME)
public @interface DataPoint {
    String name() default ""; // the display name; optional — empty by default (callers may fall back to the field name)
    String description(); // a description, required
}

View File

@ -1,19 +0,0 @@
package org.broadinstitute.sting.utils.report.tags;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* @author aaron
* <p/>
* Annotation Param
* <p/>
* a description annotation for a parameter; a variable used as input to the
 * analysis, but not (necessarily) an output. Some formats will store this
* information in comments, others will not include it.
*/
// retained at runtime so the reporting machinery can read it reflectively
@Retention(RetentionPolicy.RUNTIME)
public @interface Param {
    String name() default ""; // the name, defaulted to the variable name
    String description(); // the description of the parameter, required
}

View File

@ -1,59 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
/**
* the basic comma separated value format
*/
public class CSVFormat extends TableBasedFormat {
    // column separator and file extension for comma-separated output
    private static final String SEPARATOR = ",";
    private static final String FILE_EXTENSION = ".csv";

    /**
     * Render a single cell: the value followed by the column separator.
     *
     * @param str the cell value
     * @return the cell text with a trailing separator
     */
    @Override
    public String formatColumn(String str) {
        return str + SEPARATOR;
    }

    /**
     * CSV is machine-oriented output, so no extra blank lines or dividers.
     *
     * @return false — never add readability marks
     */
    @Override
    public boolean addReadabilityMarks() {
        return false;
    }

    /**
     * Header lines are marked with a comment-style prefix.
     *
     * @return the header prefix string
     */
    @Override
    public String headerIndicator() {
        return "#";
    }

    /**
     * All analyses are written to a single output file.
     *
     * @return false — do not split by analysis
     */
    @Override
    public boolean splitFilesByAnalysis() {
        return false;
    }

    /**
     * The extension given to output files.
     *
     * @return the file extension string
     */
    @Override
    public String extension() {
        return FILE_EXTENSION;
    }
}

View File

@ -1,101 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
import org.broadinstitute.sting.utils.report.utils.Node;
import org.broadinstitute.sting.utils.exceptions.UserException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.EnumSet;
/**
*
* @author aaron
*
* Class GrepFormat
*
* implements the grep output format
*/
public class GrepFormat implements ReportFormat {
    // the current output destination; created in write(), released in close()
    private PrintWriter stream;
    /**
     * write out to the given file, given the root node
     * @param baseFile the file to write to
     * @param baseNode the root node
     */
    @Override
    public void write(File baseFile, Node baseNode) {
        try {
            stream = new PrintWriter(baseFile);
        } catch (FileNotFoundException e) {
            throw new UserException.CouldNotCreateOutputFile(baseFile, e);
        }
        privateWrite(baseNode);
    }
    /**
     * write out to the writer, given the root node
     *
     * @param baseFile the file to write to
     * @param baseNode the root node
     */
    @Override
    public void write(Writer baseFile, Node baseNode) {
        stream = new PrintWriter(baseFile);
        privateWrite(baseNode);
    }
    /**
     * write out each analysis child of the root node, prefixed with its tag
     * nodes rendered as ".[name=value]" segments
     * @param baseNode the base (root) node
     */
    private void privateWrite(Node baseNode) {
        for (Node analysis : baseNode.getChildren()) {
            StringBuilder builder = new StringBuilder();
            boolean first = true;
            for (Node tag : analysis.getChildren()) {
                // NOTE(review): 'first' is cleared on the first child whether or not
                // it is a tag, so if the first child is a non-tag, the first tag still
                // gets a leading "." — confirm this separator behavior is intended
                if (first) first = false;
                else if (tag.tag) {
                    builder.append(".");
                }
                if ( tag.tag ) builder.append("["+tag.getName() + "=" + tag.getValue()+"]");
            }
            recursiveTraverse(analysis,builder.toString());
        }
    }
    /**
     * recursively get the data. If we hit a final (leaf) node, output the
     * accumulated path text plus the node's value; tag nodes are skipped.
     * @param n the node we're looking at
     * @param value the path text accumulated so far
     */
    public void recursiveTraverse(Node n, String value) {
        if (n.tag) return;
        if (n.getChildren().size() < 1) {
            stream.println(value + " " + n.getValue());
        }
        else {
            String nString = n.getName() + "=" +n.getValue();
            for (Node child : n.getChildren())
                recursiveTraverse(child,value + ".[" + nString + "]");
        }
    }
    // flush and release the output stream; write() must have been called first
    @Override
    public void close() {
        stream.close();
    }
    /**
     * return the valid outputs we support (both files and streams)
     * @return the set of acceptable output types
     */
    public EnumSet<AcceptableOutputType> getAcceptableOutputTypes() {
        EnumSet<AcceptableOutputType> set = EnumSet.of(AcceptableOutputType.FILE); // always acceptable
        set.add(AcceptableOutputType.STREAM);
        return set;
    }
}

View File

@ -1,75 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
import org.broadinstitute.sting.utils.report.utils.Node;
import java.io.PrintWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
*
* @author aaron
*
* Class RFormat
*
* a format for outputting R data - experimental
*/
public class RFormat extends TableBasedFormat {
    // column separator and file extension for the R-oriented CSV output
    private static final String SEPARATOR = ",";
    private static final String FILE_EXTENSION = ".csv";

    /**
     * Render a single cell: the value followed by the column separator.
     *
     * @param str the cell value
     * @return the cell text with a trailing separator
     */
    @Override
    public String formatColumn(String str) {
        return str + SEPARATOR;
    }

    /**
     * Machine-oriented output: no extra blank lines or dividers.
     *
     * @return false — never add readability marks
     */
    @Override
    public boolean addReadabilityMarks() {
        return false;
    }

    /**
     * Header lines are marked with a comment-style prefix.
     *
     * @return the header prefix string
     */
    @Override
    public String headerIndicator() {
        return "#";
    }

    /**
     * Each analysis gets its own output file.
     *
     * @return true — split output by analysis
     */
    @Override
    public boolean splitFilesByAnalysis() {
        return true;
    }

    /**
     * The extension given to output files.
     *
     * @return the file extension string
     */
    @Override
    public String extension() {
        return FILE_EXTENSION;
    }
}

View File

@ -1,22 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
import org.broadinstitute.sting.utils.report.utils.Node;
import java.io.File;
import java.io.Writer;
import java.util.EnumSet;
/**
* @author aaron
* <p/>
* Interface ReportFormat
* <p/>
* The basics of a report formatter
*/
public interface ReportFormat {
    /** the kinds of output destination a format can write to */
    public enum AcceptableOutputType { STREAM, FILE };
    /** @return the set of output destinations this format supports */
    public EnumSet<AcceptableOutputType> getAcceptableOutputTypes();
    /** render the report tree rooted at baseNode to the given file location */
    public void write(File fileLocation, Node baseNode);
    /** render the report tree rooted at baseNode to the given writer */
    public void write(Writer writeLocation, Node baseNode);
    /** flush and release any underlying output resources */
    public void close();
}

View File

@ -1,295 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
import org.broadinstitute.sting.utils.report.utils.Node;
import org.broadinstitute.sting.utils.exceptions.UserException;
import java.io.*;
import java.util.*;
/**
* an abstract class to share the basics of a table based format; many methods
* overlap in different output types.
*/
public abstract class TableBasedFormat implements ReportFormat {
    // analysis nodes grouped by analysis name; NOTE(review): never cleared, so
    // calling write() more than once on the same instance would accumulate
    // entries — confirm each instance writes exactly one report
    private Map<String, List<Node>> analyses = new HashMap<String, List<Node>>();
    // the current output destination; may be re-created per analysis when splitting
    private PrintWriter stream;
    // base path used by newStream() to derive per-analysis file names
    private File baseLocation;
    /**
     * write the base node to the specified file location.
     * NOTE(review): the stream is not flushed/closed here — close() must be
     * called afterwards to finish the output.
     * @param writeTo the file base to write to
     * @param baseNode the root node
     */
    @Override
    public void write(File writeTo, Node baseNode) {
        baseLocation = writeTo;
        // if there is only a single output file, create it
        if (!splitFilesByAnalysis()) newStream("");
        traverseAnalysisNodes(baseNode);
    }
    /**
     * write the base node to the specified writer; only valid for single-file
     * formats (multi-file formats need a File to derive names from)
     * @param writeLocation the writer to write to
     * @param baseNode the root node
     */
    public void write(Writer writeLocation, Node baseNode) {
        if (splitFilesByAnalysis()) throw new UserException.CommandLineException("Unable to write output report, we require a file input for multi-file formats")
;
        // if there is only a single output file, create it
        stream = new PrintWriter(writeLocation);
        traverseAnalysisNodes(baseNode);
        stream.flush();
    }
    /**
     * traverse the analysis nodes, outputting to our stream
     * @param baseNode the base (root) node, with analysis nodes as children
     */
    private void traverseAnalysisNodes(Node baseNode) {
        getAnalyses(baseNode);
        for (String s : analyses.keySet()) {
            writeAnalysis(analyses.get(s));
            outputTables(analyses.get(s));
        }
    }
    /**
     * break out the analyses by type, given the base node; only non-tag complex
     * children are considered analyses
     * @param baseNode the root node
     */
    private void getAnalyses(Node baseNode) {
        for (Node n : baseNode.getChildren())
            if (!n.tag && n.getComplex()) {
                if (!analyses.containsKey(n.getValue()))
                    analyses.put(n.getValue(),new ArrayList<Node>());
                analyses.get(n.getValue()).add(n);
            }
    }
    /**
     * write the analysis nodes out, only outputting the simple data points (non-table data)
     * @param nodes a list of nodes, of the same analysis type
     */
    private void writeAnalysis(List<Node> nodes) {
        if (nodes.size() < 1 || !nodes.get(0).getName().equals("analysis")) return;
        Node forTitle = nodes.get(0);
        newStream(forTitle.getValue());
        stream.println(headerIndicator() + "Analysis Name: \t" + forTitle.getValue());
        stream.println(headerIndicator() + "Analysis Description: \t" + forTitle.getDescription());
        if (addReadabilityMarks()) stream.println();
        String header = extractHeaderString(forTitle);
        if (header == null) return; // a null here indicates we don't have any unique columns to display
        stream.println(trimLastChar(header));
        if (addReadabilityMarks()) stream.println(niceDivider(header.length()));
        for (Node analysis : nodes) {
            String dataString = dataPointNodesToValues(analysis);
            // skip rows with no data (or only the "<null>" placeholder)
            if (dataString.length() > 0 && !dataString.equals("<null>")) {
                stream.print(getTagValues(analysis));
                stream.println(trimLastChar(dataString));
            }
        }
        if (addReadabilityMarks()) stream.println();
        stream.println();
    }
    /**
     * output the tables: look at list of analysis nodes (all from the same analysis) and output the table
     * @param nodes the list of analysis nodes (of the same underlying type)
     */
    public void outputTables(List<Node> nodes) {
        // rows and header line per table name
        Map<String,List<String>> tableRows = new HashMap<String,List<String>>();
        Map<String,String> tableHeaders = new HashMap<String,String>();
        for (Node analysis : nodes)
            for (Node n : analysis.getChildren()) {
                if (n.table) {
                    StringBuilder columnBuilder = new StringBuilder();
                    getTagNames(analysis,columnBuilder);
                    for (Node row : n.getChildren()) {
                        StringBuilder rowBuilder = new StringBuilder();
                        rowBuilder.append(getTagValues(analysis));
                        rowBuilder.append(formatColumn(row.getValue()));
                        columnBuilder.append(formatColumn(row.getName()));
                        for (Node column : row.getChildren()) {
                            columnBuilder.append(formatColumn(column.getValue()));
                            // a single child carries the cell's actual value
                            if (column.getChildren().size() == 1) {
                                String value = formatColumn(column.getChildren().iterator().next().getValue());
                                rowBuilder.append(value);
                            }
                        }
                        if (!tableRows.containsKey(n.getValue()))
                            tableRows.put(n.getValue(),new ArrayList<String>());
                        tableRows.get(n.getValue()).add(rowBuilder.toString());
                        if (!tableHeaders.containsKey(n.getValue()))
                            tableHeaders.put(n.getValue(),columnBuilder.toString());
                    }
                }
            }
        // output the tables
        for (String tableName : tableHeaders.keySet()) {
            newStream(tableName);
            stream.println(headerIndicator() + "Table Name : " + tableName);
            stream.println(trimLastChar(tableHeaders.get(tableName)));
            if (addReadabilityMarks()) stream.println(niceDivider(tableHeaders.get(tableName).length()));
            List<String> rows = tableRows.get(tableName);
            for (String row : rows)
                stream.println(trimLastChar(row));
            if (addReadabilityMarks()) stream.println();
        }
    }
    /**
     * drop the last character (the trailing separator appended by formatColumn).
     * NOTE(review): assumes toTrim is non-empty — an empty string would throw
     * StringIndexOutOfBoundsException
     */
    public String trimLastChar(String toTrim) {
        return toTrim.substring(0,toTrim.length()-1);
    }
    /**
     * get the tag values of an analysis node, formatted as columns
     * @param analysis the analysis node
     * @return a string representing the tag values
     */
    private String getTagValues(Node analysis) {
        StringBuilder buffer = new StringBuilder();
        for (Node s : analysis.getChildren())
            if (s.tag) buffer.append(formatColumn(s.getValue()));
        return buffer.toString();
    }
    /**
     * simple data points describe themselves, and have one child that stores their value and it's description. Extract the value and
     * convert the list of nodes to a string
     * @param analysis the analysis
     * @return a String representing the values
     */
    private String dataPointNodesToValues(Node analysis) {
        StringBuilder builder = new StringBuilder();
        for (Node n : analysis.getChildren()) {
            if (!n.tag && !n.table) {
                if (n.getChildren().size() > 1) throw new IllegalStateException("Simple data points shouldn't have more than one value");
                if (n.getChildren().size() == 1)
                    builder.append(formatColumn(n.getChildren().iterator().next().getValue()));
            }
        }
        return builder.toString();
    }
    /**
     * extract the header string (tag names followed by column names) from the
     * base analysis node; null when there are no simple data columns
     */
    private String extractHeaderString(Node analysisNode) {
        StringBuilder buffer = new StringBuilder();
        // first get the tags
        getTagNames(analysisNode, buffer);
        if (!getColumnNames(analysisNode, buffer))
            return null;
        return buffer.toString();
    }
    /**
     * get the column names from the analysis node
     * @param analysisNode the node
     * @param buffer the buffer to append to
     * @return true if there was data fields to output, false if we dont add data to the column header list
     */
    private boolean getColumnNames(Node analysisNode, StringBuilder buffer) {
        // now get the simple data points
        boolean addedValue = false;
        for (Node n : analysisNode.getChildren())
            if (!n.tag && !n.table) {
                addedValue = true;
                buffer.append(formatColumn(n.getValue()));
            }
        return addedValue;
    }
    /**
     * get the tags names from an analysis node
     * @param analysisNode the node
     * @param buffer the StringBuilder to append to
     */
    private void getTagNames(Node analysisNode, StringBuilder buffer) {
        for (Node n : analysisNode.getChildren())
            if (n.tag) buffer.append(formatColumn(n.getName()));
    }
    /**
     * this function checks whether we need to create a new stream for the specified analysis.
     * A new file is opened on the first call, and on every call when splitting by
     * analysis; otherwise the existing stream is reused.
     * NOTE(review): when writing via the Writer overload, baseLocation is null,
     * but this path is only reached for splitting formats — confirm
     */
    public void newStream(String analysisOrTableName) {
        // sanitize the name so it is safe to embed in a file name
        String name = analysisOrTableName.replaceAll("\\s+","_").replaceAll("\\/","_slash_");
        File file = new File(this.baseLocation + "." + name + this.extension());
        if (stream == null || splitFilesByAnalysis()) {
            if (stream != null) stream.close();
            try {
                stream = new PrintWriter(file);
            } catch (FileNotFoundException e) {
                throw new UserException.CouldNotCreateOutputFile(file, e);
            }
        }
    }
    /**
     * return the valid outputs we support: files always; streams only for
     * single-file formats
     * @return the set of acceptable output types
     */
    public EnumSet<AcceptableOutputType> getAcceptableOutputTypes() {
        EnumSet<AcceptableOutputType> set = EnumSet.of(AcceptableOutputType.FILE); // always acceptable
        if (!splitFilesByAnalysis()) set.add(AcceptableOutputType.STREAM);
        return set;
    }
    /**
     * create a correct-length divider string
     * @param length the length for the divider
     * @return a string with the divider text of length "length"
     */
    private String niceDivider(int length) {
        StringBuilder builder = new StringBuilder();
        for (int x = 0; x < length; x++) builder.append("-");
        return builder.toString();
    }
    /**
     * close the output file, if open
     */
    public void close() {
        if (stream != null) stream.close();
    }
    /**
     * format the string according to our internal rules
     * @param str the string to format
     * @return a string, properly formatted
     */
    public abstract String formatColumn(String str);
    /**
     * should we add readability marks?
     * @return true if we should (line breaks, etc)
     */
    public abstract boolean addReadabilityMarks();
    /**
     * a string to prepend for header lines
     * @return a string, blank if no string to be appended
     */
    public abstract String headerIndicator();
    /**
     * should we split the separate files by analysis
     * @return true to write one file per analysis
     */
    public abstract boolean splitFilesByAnalysis();
    /**
     * what extension do we want our files to have
     * @return a string of the extension
     */
    public abstract String extension();
}

View File

@ -1,92 +0,0 @@
/*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.report.templates;
/**
 * Human-readable table output format.
 *
 * Emits fixed-width, space-padded columns with readability marks enabled,
 * writing everything to a single ".tbl" file (no per-analysis splitting).
 *
 * @author aaron
 */
public class TableFormat extends TableBasedFormat {
    /** fixed printed width of every column */
    private static final int COLUMN_WIDTH = 25;
    /** extension used for output files of this format */
    private static final String TBL = "tbl";

    /**
     * Left-justify the value inside a fixed-width column.
     *
     * @param str the string to format
     * @return {@code str} padded on the right with spaces to {@link #COLUMN_WIDTH}
     */
    @Override
    public String formatColumn(String str) {
        final String padded = String.format("%-" + COLUMN_WIDTH + "s", str);
        return padded;
    }

    /**
     * Human-readable output gets readability marks (line breaks and the like).
     *
     * @return always true for this format
     */
    @Override
    public boolean addReadabilityMarks() {
        return true;
    }

    /**
     * This format puts no prefix in front of header lines.
     *
     * @return the empty string
     */
    @Override
    public String headerIndicator() {
        return "";
    }

    /**
     * All analyses share a single output file.
     *
     * @return always false
     */
    @Override
    public boolean splitFilesByAnalysis() {
        return false;
    }

    /**
     * @return the "tbl" file extension
     */
    @Override
    public String extension() {
        return TBL;
    }
}

View File

@ -1,180 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
import org.broadinstitute.sting.utils.exceptions.UserException;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
/**
 * Generates simple fixed-width text-table output: a named grid of string
 * cells with a header row, written to a caller-supplied Writer.
 */
class TextTable {
    // table cells in row-major order: rows.get(row).get(column)
    ArrayList<ArrayList<String>> rows = new ArrayList<ArrayList<String>>();
    // the minimum printed width recorded for each column
    ArrayList<Integer> minimumSizes = new ArrayList<Integer>();
    List<String> header = new ArrayList<String>();
    // fallback width for columns with no recorded size information
    private static final int defaultMinimumSize = 15;
    // text emitted between columns (empty by default)
    private String separator = "";
    // the value used to fill cells that were never explicitly set
    private static final String defaultCell = "";
    private String name;
    private String description;

    /**
     * create a text table, with the:
     * @param name name of the table
     * @param description what the table represents
     * @param header the header fields
     */
    TextTable(String name, String description, List<String> header) {
        this.name = name;
        this.description = description;
        for (int index = 0; index < header.size(); index++)
            determineMinimumColumnWidth(header.get(index).length(), index);
        this.header.addAll(header);
    }

    /**
     * create an empty text table
     */
    TextTable() {
    }

    /**
     * set a cell of the table, growing the grid as needed
     * @param row row index, zero based
     * @param column column index, zero based
     * @param entry the value to store in the cell
     */
    public void setCell(int row, int column, String entry) {
        if (rows.size() <= row)
            createRow(row, column);
        ArrayList<String> rowData = rows.get(row);
        if (rowData.size() <= column)
            for (int x = rowData.size(); x <= column; x++)
                rowData.add("");
        rows.get(row).set(column, entry);
        determineMinimumColumnWidth(entry.length(), column);
    }

    /**
     * ensure rows 0..row exist, each new row pre-filled with blank cells 0..column
     */
    private void createRow(int row, int column) {
        for (int x = rows.size(); x <= row; x++) {
            ArrayList<String> blank = new ArrayList<String>();
            for (int col = 0; col <= column; col++)
                blank.add(defaultCell);
            rows.add(blank);
        }
    }

    /**
     * write the table to the writer
     * @param writer the writer
     * @throws IllegalStateException if a row has more cells than the header has columns
     */
    public void toPrintWriter(Writer writer) {
        // the header row is written twice, preserving the original output format
        for (int pass = 0; pass < 2; pass++) {
            for (int index = 0; index < header.size(); index++)
                writeToDisk(writer, header.get(index), index, index == header.size() - 1);
            appendEndLine(writer, "\n");
        }
        for (ArrayList<String> row : rows) {
            if (row.size() > header.size())
                throw new IllegalStateException("More row-cells in table " + name + " than header columns");
            for (int y = 0; y < header.size(); y++)
                // fix: the original condition was inverted (row.size() >= y),
                // which wrote a blank placeholder for every populated cell and
                // risked an IndexOutOfBoundsException on short rows
                if (y < row.size())
                    writeToDisk(writer, row.get(y), y, (y == header.size() - 1));
                else
                    writeToDisk(writer, "", y, (y == header.size() - 1));
            appendEndLine(writer, "\n"); // fix: terminate each data row
        }
        try {
            writer.append("\n");
        } catch (IOException e) {
            throw new UserException.CouldNotCreateOutputFile(writer.toString(), "Unable to write to the Writer", e);
        }
    }

    /**
     * append a raw string (typically a line terminator) to the writer
     */
    private void appendEndLine(Writer writer, String str) {
        try {
            writer.append(str);
        } catch (IOException e) {
            // fix: the failure was previously swallowed with printStackTrace(),
            // silently truncating the output
            throw new UserException.CouldNotCreateOutputFile(writer.toString(), "Unable to write to the Writer", e);
        }
    }

    /**
     * write a single cell, padded to the column's minimum width
     * @param writer the writer to use
     * @param str the string to write
     * @param y the column index, for column width look-up
     * @param lastVal true if this is the last cell on the line
     */
    void writeToDisk(Writer writer, String str, int y, boolean lastVal) {
        try {
            writer.append(String.format("%1$-" + (getMinimumSizes(y) + 3) + "s", str));
            // fix: the separator test compared the column index against the row
            // count (rows.size()) and ignored the lastVal flag entirely
            if (!lastVal)
                writer.append(separator);
        } catch (IOException e) {
            throw new UserException.CouldNotCreateOutputFile(writer.toString(), "Unable to write to the Writer", e);
        }
    }

    /**
     * get the minimum printed width for a column
     * @param column the column index
     * @return the width in characters
     */
    public int getMinimumSizes(int column) {
        if (column >= minimumSizes.size())
            return defaultMinimumSize;
        return minimumSizes.get(column);
    }

    /**
     * widen the recorded minimum width of a column if this entry is larger
     * @param size the length of the entry being stored
     * @param position the column index
     */
    void determineMinimumColumnWidth(int size, int position) {
        if (minimumSizes.size() <= position)
            for (int x = minimumSizes.size(); x <= position; x++)
                minimumSizes.add(x, 0);
        minimumSizes.set(position, Math.max(minimumSizes.get(position), size));
    }

    public List<String> getHeader() {
        return header;
    }

    public void setHeader(List<String> header) {
        this.header = header;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}

View File

@ -1,104 +0,0 @@
package org.broadinstitute.sting.utils.report.utils;
import java.util.*;
/**
 * @author aaron
 * <p/>
 * Class ComplexDataUtils
 * <p/>
 * Methods for breaking complex data values down into Node trees for the
 * report output system.
 */
public class ComplexDataUtils {
    /**
     * convert any object into a string-keyed tree of Nodes
     *
     * @param obj the object; may be null, a TableType, a Collection, an array,
     *            or a plain value (Maps are not supported)
     * @return a collection of Nodes parsed from the object; all non-leaf
     *         values are Strings
     * @throws UnsupportedOperationException if obj is a Map
     */
    public static Collection<Node> resolveObjects(Object obj) { // TODO: fix this, we need a way to get the name of the list from the data point
        Collection<Node> nodes = new ArrayList<Node>();
        // the simplest case, the object is null
        if (obj == null) nodes.add(new Node("<null>", "<null>", "<null>"));
        // capture objects of type TableType
        else if (obj instanceof TableType)
            nodes.add(tableToNode((TableType) obj, ((TableType) obj).getName()));
        // maps are ambiguous to render, so refuse them outright
        else if (obj instanceof Map) {
            throw new UnsupportedOperationException("The report generation system is currently unable to output Maps, due to their ambiguity");
        // handle collections
        } else if (obj instanceof Collection)
            nodes.addAll(listToNode((Collection) obj, "collection"));
        // arrays; fix: Arrays.asList(obj) wrapped the whole array as a single
        // list element (and could not handle primitive arrays at all)
        else if (obj.getClass().isArray())
            nodes.addAll(listToNode(arrayToList(obj), "array"));
        // else we have a simple object (at least try to handle it that way)
        else
            nodes.add(extractPlainObjectOrPrimitive(obj.getClass().getSimpleName(), obj));
        // return the collection of nodes we've parsed out
        return nodes;
    }

    /**
     * expand an array (object or primitive) into a List of its elements
     *
     * @param array an object whose class reports isArray()
     * @return the elements in order, boxed where necessary
     */
    private static List<Object> arrayToList(Object array) {
        final int length = java.lang.reflect.Array.getLength(array);
        List<Object> elements = new ArrayList<Object>(length);
        for (int i = 0; i < length; i++)
            elements.add(java.lang.reflect.Array.get(array, i));
        return elements;
    }

    /**
     * extract a (hopefully) primitive value into a leaf node
     *
     * @param name the node name
     * @param obj the object; must not be null (callers guard against null)
     */
    private static Node extractPlainObjectOrPrimitive(String name, Object obj) {
        final String value;
        if (obj instanceof Float || obj instanceof Double)
            // fix: the old (Double) cast threw ClassCastException for Floats
            value = String.format("%.4f", ((Number) obj).doubleValue());
        else
            value = obj.toString();
        return new Node(name, value, "value");
    }

    /**
     * given a TableType object, make it into a tree using Nodes
     *
     * @param table the table type to convert
     * @param name the display name of the table
     * @return a node representing this table
     */
    private static Node tableToNode(TableType table, String name) {
        Node root = new Node("table", name, "Table");
        root.setTable();
        Object[] rowKeys = table.getRowKeys();
        Object[] colKeys = table.getColumnKeys();
        // one child per row, each row holding one child per column
        for (int x = 0; x < rowKeys.length; x++) {
            Node row = new Node("row", rowKeys[x].toString(), "a row in a table");
            root.addChild(row);
            for (int y = 0; y < colKeys.length; y++) {
                Node col = new Node("column", colKeys[y].toString(), "columns in a table");
                row.addChild(col);
                col.addChild(extractPlainObjectOrPrimitive("cell(" + x + "," + y + ")", table.getCell(x, y)));
            }
        }
        return root;
    }

    /**
     * given a Collection, make each element into a "column"/"value" node pair
     *
     * @param coll the collection to iterate
     * @param name currently unused; kept for signature stability -- TODO confirm
     * @return a collection of Nodes, one per element
     */
    private static Collection<Node> listToNode(Collection coll, String name) {
        Collection<Node> nodes = new ArrayList<Node>();
        Iterator<Object> iter = coll.iterator();
        for (int x = 0; x < coll.size(); x++) {
            Node value = new Node("column " + x, String.valueOf(x), "column");
            value.addChild(new Node("value " + x, iter.next().toString(), "value"));
            nodes.add(value);
        }
        return nodes;
    }
}

View File

@ -1,116 +0,0 @@
package org.broadinstitute.sting.utils.report.utils;
import java.util.*;
/**
 * A node in the tree extracted by the report output system: a named,
 * described string value with an optional ordered set of children.
 */
public class Node {
    public String name;
    public String value;
    public String description;
    public boolean display; // true when this node is real output rather than internal bookkeeping
    public boolean table;   // hack: marks a node that acts as a table/row root
    public boolean tag;
    public Collection<Node> children;

    /** Create a displayable node. */
    public Node(String name, String value, String description) {
        this(name, value, description, true);
    }

    /** Create a node with explicit control over display. */
    public Node(String name, String value, String description, boolean display) {
        this.name = name;
        this.value = value;
        this.description = description;
        this.display = display;
        this.table = false;
        this.tag = false;
    }

    /** Mark this node as a table root. */
    public void setTable() {
        table = true;
    }

    /** Append one child, creating the order-preserving child set on demand. */
    public void addChild(Node child) {
        if (children == null) {
            children = new LinkedHashSet<Node>();
        }
        children.add(child);
    }

    /** Append many children, creating the child set on demand. */
    public void addAllChildren(Collection<Node> newChildren) {
        if (children == null) {
            children = new LinkedHashSet<Node>();
        }
        children.addAll(newChildren);
    }

    /** @return true when this node has at least one child */
    public Boolean getComplex() {
        return children != null && !children.isEmpty();
    }

    /**
     * a convenience method for adding a new sub-node with the specified value
     *
     * @param value the value of the sub-node
     */
    public void createSubNode(String name, String value, String description) {
        addChild(new Node(name, value, description));
    }

    public String getValue() {
        return value;
    }

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    /** @return the children, or a fresh empty list when none were ever added */
    public Collection<Node> getChildren() {
        if (children == null) {
            return new ArrayList<Node>();
        }
        return children;
    }

    public boolean getDisplay() {
        return display;
    }

    public boolean getTable() {
        return table;
    }

    public boolean getTag() {
        return tag;
    }

    /** Mark this node as a tag node. */
    public void setTag() {
        tag = true;
    }

    /**
     * Copy every field of {@code n} into this node (note: despite the name,
     * this copies FROM the argument). The child set is replaced by a fresh
     * copy of the argument's children.
     */
    public void clone(Node n) {
        name = n.name;
        value = n.value;
        description = n.description;
        display = n.display;
        table = n.table;
        tag = n.tag;
        children = new LinkedHashSet<Node>();
        if (n.children != null) {
            children.addAll(n.getChildren());
        }
    }

    /** @return the table rows of this subtree, expanding nested tables */
    public List<List<Node>> getTableRows() {
        return NodeUtils.flattenToRow(this, false);
    }

    /** @return the table rows of this subtree, without expanding nested tables */
    public List<List<Node>> getTableRowsNoTables() {
        return NodeUtils.flattenToRow(this, true);
    }

    /** @return the number of rows this subtree will flatten into */
    public int getRowCount() {
        return NodeUtils.flattenToRowCount(this);
    }
}

View File

@ -1,97 +0,0 @@
package org.broadinstitute.sting.utils.report.utils;
import java.util.ArrayList;
import java.util.List;
/**
 *
 * @author aaron
 *
 * Class NodeUtils
 *
 * Utilities for flattening a Node tree into lists of rows (as used by the
 * report templates). The logic is order-sensitive; documented as-is.
 */
public class NodeUtils {
    /**
     * A cursor over a single Node, used while recursively flattening a
     * tree into rows.
     */
    static class NodeMarker {
        private Node node;
        public NodeMarker(Node n) {
            node = n;
        }
        /**
         * Number of output rows this subtree expands into: a table node
         * contributes one row per child; counts multiply down the tree.
         */
        public int rowCount() {
            int sum = (node.table) ? node.getChildren().size() : 1;
            for (Node n : node.getChildren()) {
                NodeMarker fn = new NodeMarker(n);
                sum = sum * fn.rowCount();
            }
            return sum;
        }
        // a displayable, non-tag node with no children is a row cell
        private boolean validLeafNode() {
            return node.getChildren().size() == 0 && node.display && !node.tag;
        }
        // append this node to every partial row accumulated so far
        private List<List<Node>> addToEachList(List<List<Node>> list) {
            for (List<Node> lt : list)
                lt.add(node);
            return list;
        }
        /**
         * Recursively flatten this subtree into rows.
         * @param oldList the partial rows accumulated so far (mutated and/or replaced)
         * @param excludeTables if true, table nodes are not expanded into multiple rows
         * @return the updated list of partial rows
         */
        public List<List<Node>> toRow(List<List<Node>> oldList, boolean excludeTables) {
            // if we're a leaf node that isn't a tag, add it to each list
            if (validLeafNode())
                addToEachList(oldList);
            // special case: if we've just got a single node, traverse into it
            else if (node.getChildren().size() > 0 && !node.table)
                for (Node n : node.children) {
                    oldList = new NodeMarker(n).toRow(oldList, excludeTables);
                }
            // when we encounter a table we want to branch into multiple rows
            else if (node.table && !excludeTables) {
                List<List<Node>> newList = new ArrayList<List<Node>>();
                // each displayable, non-tag child of the table seeds its own row
                for (Node child : node.children) {
                    if (child.display && !child.tag) {
                        List<List<Node>> tempList = new ArrayList<List<Node>>();
                        tempList.add(new ArrayList<Node>());
                        tempList.get(0).add(child);
                        NodeMarker marker = new NodeMarker(child);
                        List<List<Node>> carry = marker.toRow(tempList, excludeTables);
                        newList.addAll(carry);
                    }
                }
                List<List<Node>> ret = new ArrayList<List<Node>>();
                // permutations of each previous list and the new temp list
                // (cross-product: every old partial row extended by every table row)
                for (List<Node> original : oldList)
                    for (List<Node> lst : newList) {
                        List<Node> temp = new ArrayList<Node>();
                        temp.addAll(original);
                        temp.addAll(lst);
                        ret.add(temp);
                    }
                return ret;
            }
            // by default return the old list
            return oldList;
        }
    }
    // given a node, get the number of rows it will generate
    public static int flattenToRowCount(Node n) {
        NodeMarker fn = new NodeMarker(n);
        return fn.rowCount();
    }
    // given a node, generate rows (flattening tables unless excludeTables is set)
    public static List<List<Node>> flattenToRow(Node n, boolean excludeTables) {
        NodeMarker fn = new NodeMarker(n);
        List<List<Node>> nodesList = new ArrayList<List<Node>>();
        nodesList.add(new ArrayList<Node>());
        return fn.toRow(nodesList, excludeTables);
    }
}

View File

@ -1,63 +0,0 @@
package org.broadinstitute.sting.utils.report;
import org.testng.Assert;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.utils.report.tags.Analysis;
import org.broadinstitute.sting.utils.report.tags.DataPoint;
import org.broadinstitute.sting.utils.report.tags.Param;
import org.testng.annotations.Test;
/**
 * @author aaron
 * <p/>
 * Class AnalysisModuleScannerUnitTest
 * <p/>
 * Exercises the analysis scanner, which extracts parameter and analysis
 * metadata from an annotated module class.
 */
public class AnalysisModuleScannerUnitTest extends BaseTest {
    @Test
    public void testBasicScan() {
        final AnalysisModuleScanner moduleScanner = new AnalysisModuleScanner(FakeAnalysis.class);
        // three @Param fields should have been discovered on FakeAnalysis
        Assert.assertEquals(moduleScanner.getParameters().size(), 3);
        // and each carries the shared description text
        final String firstDescription = moduleScanner.getParameters().values().iterator().next().description();
        Assert.assertTrue("basic description".equals(firstDescription));
        // the @Analysis annotation supplies the module's name and description
        Assert.assertTrue("testAnalysis".equals(moduleScanner.getAnalysis().name()));
        Assert.assertTrue("The is just a simple description".equals(moduleScanner.getAnalysis().description()));
    }
}
// --------------------------------------------------------------------------------
// my fake analysis class: a minimal annotated module for the scanner to read
// --------------------------------------------------------------------------------
@Analysis(name = "testAnalysis", description = "The is just a simple description")
class FakeAnalysis {
    // three @Param fields -- the scan test expects exactly these three parameters
    @Param(description = "basic description")
    public String text = "GRRR";
    @Param(description = "basic description")
    public String text2superlonganme = "GRRR";
    @Param(description = "basic description")
    public String text3 = "GRRR";
    // three @DataPoint fields -- presumably not counted as parameters by the
    // scanner, since the test expects getParameters().size() == 3; verify
    @DataPoint(description = "basic description")
    public String text4 = "GRRR";
    @DataPoint(description = "basic description")
    public String text5 = "GRRR";
    @DataPoint(description = "basic description")
    public String text6 = "GRRR";
    public FakeAnalysis() {
    }
}

View File

@ -1,46 +0,0 @@
package org.broadinstitute.sting.utils.report.templates;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.ArrayList;
/**
 * Unit tests for TextTable's sparse cell storage: setting a cell should
 * grow the grid and blank-fill every untouched position.
 */
public class TextTableUnitTest {
    @Test
    public void testBasicSetCell() {
        final TextTable table = new TextTable("name", "description", new ArrayList<String>());
        table.setCell(1, 5, "entry");
        int found = 0;
        final ArrayList<ArrayList<String>> cells = table.rows;
        // the grid must now cover rows 0..1 and columns 0..5
        for (int row = 0; row <= 1; row++) {
            for (int col = 0; col <= 5; col++) {
                final String cell = cells.get(row).get(col);
                if (row == 1 && col == 5) {
                    Assert.assertTrue(cell.equals("entry"));
                    found++;
                } else {
                    // every other position is blank-filled
                    Assert.assertTrue(cell.equals(""));
                }
            }
        }
        Assert.assertEquals(found, 1, "Incorrect number of entries seen");
    }

    @Test
    public void testBasicSetTwoCells() {
        final TextTable table = new TextTable("name", "description", new ArrayList<String>());
        table.setCell(1, 5, "entry");
        table.setCell(1, 1, "entry");
        int found = 0;
        final ArrayList<ArrayList<String>> cells = table.rows;
        for (int row = 0; row <= 1; row++) {
            for (int col = 0; col <= 5; col++) {
                final String cell = cells.get(row).get(col);
                final boolean expectEntry = (row == 1 && col == 5) || (row == 1 && col == 1);
                if (expectEntry) {
                    Assert.assertTrue(cell.equals("entry"));
                    found++;
                } else {
                    Assert.assertTrue(cell.equals(""));
                }
            }
        }
        Assert.assertEquals(found, 2, "Incorrect number of entries seen");
    }
}