Merge branch 'master' of github.com:broadinstitute/gsa-unstable

This commit is contained in:
Ami Levy-Moonshine 2013-01-18 15:03:42 -05:00
commit 0fb7b73107
110 changed files with 7833 additions and 5358 deletions

View File

@ -61,6 +61,7 @@
<dependency org="commons-lang" name="commons-lang" rev="2.5"/>
<dependency org="commons-logging" name="commons-logging" rev="1.1.1"/>
<dependency org="commons-io" name="commons-io" rev="2.1"/>
<dependency org="commons-collections" name="commons-collections" rev="3.2.1"/>
<dependency org="org.apache.commons" name="commons-math" rev="2.2"/>
<!-- Lucene core utilities -->
@ -82,7 +83,7 @@
<!-- testing and evaluation dependencies -->
<dependency org="org.testng" name="testng" rev="5.14.1"/>
<dependency org="org.uncommons" name="reportng" rev="1.1.2"/>
<dependency org="com.google.code.caliper" name="caliper" rev="1.0-SNAPSHOT"/>
<dependency org="com.google.caliper" name="caliper" rev="0.5-rc1"/>
<!-- Contracts for Java and dependencies -->
<dependency org="com.google.code.cofoja" name="cofoja" rev="1.0-r139"/>

View File

@ -47,7 +47,6 @@
package org.broadinstitute.sting.gatk.downsampling;
import net.sf.samtools.SAMReadGroupRecord;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.pileup.*;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
@ -78,30 +77,15 @@ public class AlleleBiasedDownsamplingUtils {
for ( int i = 0; i < 4; i++ )
alleleStratifiedElements[i] = new ArrayList<PileupElement>();
// keep all of the reduced reads
final ArrayList<PileupElement> reducedReadPileups = new ArrayList<PileupElement>();
// start by stratifying the reads by the alleles they represent at this position
for( final PileupElement pe : pileup ) {
for ( final PileupElement pe : pileup ) {
// we do not want to remove a reduced read
if ( pe.getRead().isReducedRead() )
reducedReadPileups.add(pe);
final int baseIndex = BaseUtils.simpleBaseToBaseIndex(pe.getBase());
if ( baseIndex != -1 )
alleleStratifiedElements[baseIndex].add(pe);
}
// Unfortunately, we need to maintain the original pileup ordering of reads or FragmentUtils will complain later.
int numReadsToRemove = (int)(pileup.getNumberOfElements() * downsamplingFraction); // floor
final TreeSet<PileupElement> elementsToKeep = new TreeSet<PileupElement>(new Comparator<PileupElement>() {
@Override
public int compare(PileupElement element1, PileupElement element2) {
final int difference = element1.getRead().getAlignmentStart() - element2.getRead().getAlignmentStart();
return difference != 0 ? difference : element1.getRead().getReadName().compareTo(element2.getRead().getReadName());
if ( !pe.getRead().isReducedRead() ) {
final int baseIndex = BaseUtils.simpleBaseToBaseIndex(pe.getBase());
if ( baseIndex != -1 )
alleleStratifiedElements[baseIndex].add(pe);
}
});
elementsToKeep.addAll(reducedReadPileups);
}
// make a listing of allele counts
final int[] alleleCounts = new int[4];
@ -109,22 +93,30 @@ public class AlleleBiasedDownsamplingUtils {
alleleCounts[i] = alleleStratifiedElements[i].size();
// do smart down-sampling
int numReadsToRemove = (int)(pileup.getNumberOfElements() * downsamplingFraction); // floor
final int[] targetAlleleCounts = runSmartDownsampling(alleleCounts, numReadsToRemove);
final HashSet<PileupElement> readsToRemove = new HashSet<PileupElement>(numReadsToRemove);
for ( int i = 0; i < 4; i++ ) {
final ArrayList<PileupElement> alleleList = alleleStratifiedElements[i];
// if we don't need to remove any reads, keep them all
if ( alleleList.size() <= targetAlleleCounts[i] )
elementsToKeep.addAll(alleleList);
else
elementsToKeep.addAll(downsampleElements(alleleList, alleleList.size() - targetAlleleCounts[i], log));
// if we don't need to remove any reads, then don't
if ( alleleList.size() > targetAlleleCounts[i] )
readsToRemove.addAll(downsampleElements(alleleList, alleleList.size() - targetAlleleCounts[i], log));
}
// clean up pointers so memory can be garbage collected if needed
for ( int i = 0; i < 4; i++ )
alleleStratifiedElements[i].clear();
return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList<PileupElement>(elementsToKeep));
// we need to keep the reads sorted because the FragmentUtils code will expect them in coordinate order and will fail otherwise
final List<PileupElement> readsToKeep = new ArrayList<PileupElement>(pileup.getNumberOfElements() - numReadsToRemove);
for ( final PileupElement pe : pileup ) {
if ( !readsToRemove.contains(pe) ) {
readsToKeep.add(pe);
}
}
return new ReadBackedPileupImpl(pileup.getLocation(), new ArrayList<PileupElement>(readsToKeep));
}
private static int scoreAlleleCounts(final int[] alleleCounts) {
@ -188,37 +180,43 @@ public class AlleleBiasedDownsamplingUtils {
}
/**
* Performs allele biased down-sampling on a pileup and computes the list of elements to keep
* Performs allele biased down-sampling on a pileup and computes the list of elements to remove
*
* @param elements original list of records
* @param numElementsToRemove the number of records to remove
* @param log logging output
* @return the list of pileup elements TO KEEP
* @return the list of pileup elements TO REMOVE
*/
private static List<PileupElement> downsampleElements(final ArrayList<PileupElement> elements, final int numElementsToRemove, final PrintStream log) {
if ( numElementsToRemove == 0 )
return elements;
private static <T> List<T> downsampleElements(final List<T> elements, final int numElementsToRemove, final PrintStream log) {
ArrayList<T> elementsToRemove = new ArrayList<T>(numElementsToRemove);
// are there no elements to remove?
if ( numElementsToRemove == 0 )
return elementsToRemove;
// should we remove all of the elements?
final int pileupSize = elements.size();
if ( numElementsToRemove == pileupSize ) {
logAllElements(elements, log);
return new ArrayList<PileupElement>(0);
elementsToRemove.addAll(elements);
return elementsToRemove;
}
// create a bitset describing which elements to remove
final BitSet itemsToRemove = new BitSet(pileupSize);
for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(pileupSize, numElementsToRemove) ) {
itemsToRemove.set(selectedIndex);
}
ArrayList<PileupElement> elementsToKeep = new ArrayList<PileupElement>(pileupSize - numElementsToRemove);
for ( int i = 0; i < pileupSize; i++ ) {
if ( itemsToRemove.get(i) )
logRead(elements.get(i).getRead(), log);
else
elementsToKeep.add(elements.get(i));
if ( itemsToRemove.get(i) ) {
final T element = elements.get(i);
logElement(element, log);
elementsToRemove.add(element);
}
}
return elementsToKeep;
return elementsToRemove;
}
/**
@ -252,65 +250,30 @@ public class AlleleBiasedDownsamplingUtils {
final List<GATKSAMRecord> alleleBin = alleleReadMap.get(alleles.get(i));
if ( alleleBin.size() > targetAlleleCounts[i] ) {
readsToRemove.addAll(downsampleReads(alleleBin, alleleBin.size() - targetAlleleCounts[i], log));
readsToRemove.addAll(downsampleElements(alleleBin, alleleBin.size() - targetAlleleCounts[i], log));
}
}
return readsToRemove;
}
/**
* Performs allele biased down-sampling on a pileup and computes the list of elements to remove
*
* @param reads original list of records
* @param numElementsToRemove the number of records to remove
* @param log logging output
* @return the list of pileup elements TO REMOVE
*/
private static List<GATKSAMRecord> downsampleReads(final List<GATKSAMRecord> reads, final int numElementsToRemove, final PrintStream log) {
final ArrayList<GATKSAMRecord> readsToRemove = new ArrayList<GATKSAMRecord>(numElementsToRemove);
if ( numElementsToRemove == 0 )
return readsToRemove;
final int pileupSize = reads.size();
if ( numElementsToRemove == pileupSize ) {
logAllReads(reads, log);
return reads;
}
final BitSet itemsToRemove = new BitSet(pileupSize);
for ( Integer selectedIndex : MathUtils.sampleIndicesWithoutReplacement(pileupSize, numElementsToRemove) ) {
itemsToRemove.set(selectedIndex);
}
for ( int i = 0; i < pileupSize; i++ ) {
if ( itemsToRemove.get(i) ) {
final GATKSAMRecord read = reads.get(i);
readsToRemove.add(read);
logRead(read, log);
private static <T> void logAllElements(final List<T> elements, final PrintStream log) {
if ( log != null ) {
for ( final T obj : elements ) {
logElement(obj, log);
}
}
return readsToRemove;
}
private static void logAllElements(final List<PileupElement> elements, final PrintStream log) {
private static <T> void logElement(final T obj, final PrintStream log) {
if ( log != null ) {
for ( final PileupElement p : elements )
logRead(p.getRead(), log);
}
}
private static void logAllReads(final List<GATKSAMRecord> reads, final PrintStream log) {
if ( log != null ) {
for ( final GATKSAMRecord read : reads )
logRead(read, log);
}
}
final GATKSAMRecord read;
if ( obj instanceof PileupElement )
read = ((PileupElement)obj).getRead();
else
read = (GATKSAMRecord)obj;
private static void logRead(final SAMRecord read, final PrintStream log) {
if ( log != null ) {
final SAMReadGroupRecord readGroup = read.getReadGroup();
log.println(String.format("%s\t%s\t%s\t%s", read.getReadName(), readGroup.getSample(), readGroup.getLibrary(), readGroup.getPlatformUnit()));
}

View File

@ -99,7 +99,7 @@ public class DepthOfCoverage extends InfoFieldAnnotation implements StandardAnno
for (PerReadAlleleLikelihoodMap maps : perReadAlleleLikelihoodMap.values() ) {
for (Map.Entry<GATKSAMRecord,Map<Allele,Double>> el : maps.getLikelihoodReadMap().entrySet()) {
final GATKSAMRecord read = el.getKey();
depth += (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinate(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1);
depth += (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1);
}
}
}

View File

@ -144,7 +144,7 @@ public class DepthPerAlleleBySample extends GenotypeAnnotation implements Standa
continue; // read is non-informative
if (!vc.getAlleles().contains(a))
continue; // sanity check - shouldn't be needed
alleleCounts.put(a, alleleCounts.get(a) + (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinate(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1));
alleleCounts.put(a, alleleCounts.get(a) + (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1));
}
final int[] counts = new int[alleleCounts.size()];
counts[0] = alleleCounts.get(vc.getReference());

View File

@ -116,8 +116,8 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
else if (table1 == null)
return annotationForOneTable(pValueForContingencyTable(table2));
else { // take the one with the best (i.e., least significant pvalue)
double pvalue1 = Math.max(pValueForContingencyTable(table1), MIN_PVALUE);
double pvalue2 = Math.max(pValueForContingencyTable(table2), MIN_PVALUE);
double pvalue1 = pValueForContingencyTable(table1);
double pvalue2 = pValueForContingencyTable(table2);
return annotationForOneTable(Math.max(pvalue1, pvalue2));
}
}
@ -129,7 +129,7 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
* @return a hash map from FS -> phred-scaled pValue
*/
private Map<String, Object> annotationForOneTable(final double pValue) {
final Object value = String.format("%.3f", QualityUtils.phredScaleErrorRate(pValue));
final Object value = String.format("%.3f", QualityUtils.phredScaleErrorRate(Math.max(pValue, MIN_PVALUE))); // prevent INFINITYs
return Collections.singletonMap(FS, value);
// Map<String, Object> map = new HashMap<String, Object>();
// map.put(FS, String.format("%.3f", QualityUtils.phredScaleErrorRate(pValue)));
@ -265,24 +265,16 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
for (PerReadAlleleLikelihoodMap maps : stratifiedPerReadAlleleLikelihoodMap.values() ) {
for (Map.Entry<GATKSAMRecord,Map<Allele,Double>> el : maps.getLikelihoodReadMap().entrySet()) {
final boolean matchesRef = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()).equals(ref,true);
final boolean matchesAlt = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue()).equals(alt,true);
if ( !matchesRef && !matchesAlt )
continue;
boolean isFW = el.getKey().getReadNegativeStrandFlag();
int row = matchesRef ? 0 : 1;
int column = isFW ? 0 : 1;
final Allele mostLikelyAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue());
final GATKSAMRecord read = el.getKey();
table[row][column] += (read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinate(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1);
final int representativeCount = read.isReducedRead() ? read.getReducedCount(ReadUtils.getReadCoordinateForReferenceCoordinateUpToEndOfRead(read, vc.getStart(), ReadUtils.ClippingTail.RIGHT_TAIL)) : 1;
updateTable(table, mostLikelyAllele, read, ref, alt, representativeCount);
}
}
return table;
}
/**
Allocate and fill a 2x2 strand contingency table. In the end, it'll look something like this:
* fw rc
@ -299,31 +291,36 @@ public class FisherStrand extends InfoFieldAnnotation implements StandardAnnotat
for ( Map.Entry<String, AlignmentContext> sample : stratifiedContexts.entrySet() ) {
for (PileupElement p : sample.getValue().getBasePileup()) {
// ignore reduced reads because they are always on the forward strand!
// TODO -- when het compression is enabled in RR, we somehow need to allow those reads through into the Fisher test
if ( p.getRead().isReducedRead() )
continue;
if ( ! RankSumTest.isUsableBase(p, false) ) // ignore deletions
continue;
if ( p.getQual() < minQScoreToConsider || p.getMappingQual() < minQScoreToConsider )
continue;
final Allele base = Allele.create(p.getBase(), false);
final boolean isFW = !p.getRead().getReadNegativeStrandFlag();
final boolean matchesRef = ref.equals(base, true);
final boolean matchesAlt = alt.equals(base, true);
if ( matchesRef || matchesAlt ) {
int row = matchesRef ? 0 : 1;
int column = isFW ? 0 : 1;
table[row][column] += p.getRepresentativeCount();
}
updateTable(table, Allele.create(p.getBase(), false), p.getRead(), ref, alt, p.getRepresentativeCount());
}
}
return table;
}
private static void updateTable(final int[][] table, final Allele allele, final GATKSAMRecord read, final Allele ref, final Allele alt, final int representativeCount) {
// ignore reduced reads because they are always on the forward strand!
// TODO -- when het compression is enabled in RR, we somehow need to allow those reads through into the Fisher test
if ( read.isReducedRead() )
return;
final boolean matchesRef = allele.equals(ref, true);
final boolean matchesAlt = allele.equals(alt, true);
if ( matchesRef || matchesAlt ) {
final boolean isFW = !read.getReadNegativeStrandFlag();
int row = matchesRef ? 0 : 1;
int column = isFW ? 0 : 1;
table[row][column] += representativeCount;
}
}
}

View File

@ -95,9 +95,9 @@ public class GCContent extends InfoFieldAnnotation implements ExperimentalAnnota
for ( byte base : ref.getBases() ) {
int baseIndex = BaseUtils.simpleBaseToBaseIndex(base);
if ( baseIndex == BaseUtils.gIndex || baseIndex == BaseUtils.cIndex )
if ( baseIndex == BaseUtils.Base.G.ordinal() || baseIndex == BaseUtils.Base.C.ordinal() )
gc++;
else if ( baseIndex == BaseUtils.aIndex || baseIndex == BaseUtils.tIndex )
else if ( baseIndex == BaseUtils.Base.A.ordinal() || baseIndex == BaseUtils.Base.T.ordinal() )
at++;
else
; // ignore

View File

@ -56,7 +56,6 @@ import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.InfoFieldAnnot
import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.StandardAnnotation;
import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.variant.utils.BaseUtils;
import org.broadinstitute.sting.utils.Haplotype;
import org.broadinstitute.sting.utils.MathUtils;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.variant.vcf.VCFHeaderLineType;
@ -236,8 +235,8 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
final byte[] haplotypeBases = new byte[contextSize];
Arrays.fill(haplotypeBases, (byte) REGEXP_WILDCARD);
final double[] baseQualities = new double[contextSize];
Arrays.fill(baseQualities, 0.0);
final byte[] baseQualities = new byte[contextSize];
Arrays.fill(baseQualities, (byte)0);
byte[] readBases = read.getReadBases();
readBases = AlignmentUtils.readToAlignmentByteArray(read.getCigar(), readBases); // Adjust the read bases based on the Cigar string
@ -267,7 +266,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
readQuals[baseOffset] = (byte) 0;
} // quals less than 5 are used as codes and don't have actual probabilistic meaning behind them
haplotypeBases[i] = readBases[baseOffset];
baseQualities[i] = (double) readQuals[baseOffset];
baseQualities[i] = readQuals[baseOffset];
}
return new Haplotype(haplotypeBases, baseQualities);
@ -286,10 +285,10 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
final int length = a.length;
final byte[] consensusChars = new byte[length];
final double[] consensusQuals = new double[length];
final int[] consensusQuals = new int[length];
final double[] qualsA = haplotypeA.getQuals();
final double[] qualsB = haplotypeB.getQuals();
final int[] qualsA = haplotypeA.getQuals();
final int[] qualsB = haplotypeB.getQuals();
for (int i = 0; i < length; i++) {
chA = a[i];
@ -300,7 +299,7 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
if ((chA == wc) && (chB == wc)) {
consensusChars[i] = wc;
consensusQuals[i] = 0.0;
consensusQuals[i] = 0;
} else if ((chA == wc)) {
consensusChars[i] = chB;
consensusQuals[i] = qualsB[i];
@ -433,7 +432,6 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
}
public List<String> getKeyNames() {
return Arrays.asList("HaplotypeScore");
}
@ -441,4 +439,46 @@ public class HaplotypeScore extends InfoFieldAnnotation implements StandardAnnot
public List<VCFInfoHeaderLine> getDescriptions() {
return Arrays.asList(new VCFInfoHeaderLine("HaplotypeScore", 1, VCFHeaderLineType.Float, "Consistency of the site with at most two segregating haplotypes"));
}
private static class Haplotype {
private final byte[] bases;
private final int[] quals;
private int qualitySum = -1;
public Haplotype( final byte[] bases, final int[] quals ) {
this.bases = bases;
this.quals = quals;
}
public Haplotype( final byte[] bases, final int qual ) {
this.bases = bases;
quals = new int[bases.length];
Arrays.fill(quals, qual);
}
public Haplotype( final byte[] bases, final byte[] quals ) {
this.bases = bases;
this.quals = new int[quals.length];
for ( int i = 0 ; i < quals.length; i++ )
this.quals[i] = (int)quals[i];
}
public double getQualitySum() {
if ( qualitySum == -1 ) {
qualitySum = 0;
for ( final int qual : quals ) {
qualitySum += qual;
}
}
return qualitySum;
}
public int[] getQuals() {
return quals.clone();
}
public byte[] getBases() {
return bases.clone();
}
}
}

View File

@ -169,8 +169,7 @@ public abstract class RankSumTest extends InfoFieldAnnotation implements ActiveR
* @return true if this base is part of a meaningful read for comparison, false otherwise
*/
public static boolean isUsableBase(final PileupElement p, final boolean allowDeletions) {
return !(p.isInsertionAtBeginningOfRead() ||
(! allowDeletions && p.isDeletion()) ||
return !((! allowDeletions && p.isDeletion()) ||
p.getMappingQual() == 0 ||
p.getMappingQual() == QualityUtils.MAPPING_QUALITY_UNAVAILABLE ||
((int) p.getQual()) < QualityUtils.MIN_USABLE_Q_SCORE); // need the unBAQed quality score here

View File

@ -52,6 +52,7 @@ import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.annotator.interfaces.*;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.genotyper.PerReadAlleleLikelihoodMap;
import org.broadinstitute.variant.vcf.*;
import org.broadinstitute.sting.utils.exceptions.UserException;
@ -214,10 +215,10 @@ public class VariantAnnotatorEngine {
Map<String, Object> infoAnnotations = new LinkedHashMap<String, Object>(vc.getAttributes());
// annotate db occurrences
vc = annotateDBs(tracker, ref, vc, infoAnnotations);
vc = annotateDBs(tracker, ref.getLocus(), vc, infoAnnotations);
// annotate expressions where available
annotateExpressions(tracker, ref, infoAnnotations);
annotateExpressions(tracker, ref.getLocus(), infoAnnotations);
// go through all the requested info annotationTypes
for ( InfoFieldAnnotation annotationType : requestedInfoAnnotations ) {
@ -254,10 +255,22 @@ public class VariantAnnotatorEngine {
return builder.genotypes(annotateGenotypes(null, null, null, vc, perReadAlleleLikelihoodMap)).make();
}
private VariantContext annotateDBs(RefMetaDataTracker tracker, ReferenceContext ref, VariantContext vc, Map<String, Object> infoAnnotations) {
public VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc) {
final Map<String, Object> newInfoAnnotations = new HashMap<String, Object>(0);
vc = annotateDBs(tracker, loc, vc, newInfoAnnotations);
if ( !newInfoAnnotations.isEmpty() ) {
final VariantContextBuilder builder = new VariantContextBuilder(vc).attributes(newInfoAnnotations);
vc = builder.make();
}
return vc;
}
private VariantContext annotateDBs(final RefMetaDataTracker tracker, final GenomeLoc loc, VariantContext vc, final Map<String, Object> infoAnnotations) {
for ( Map.Entry<RodBinding<VariantContext>, String> dbSet : dbAnnotations.entrySet() ) {
if ( dbSet.getValue().equals(VCFConstants.DBSNP_KEY) ) {
final String rsID = VCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), ref.getLocus()), vc.getType());
final String rsID = VCFUtils.rsIDOfFirstRealVariant(tracker.getValues(dbSet.getKey(), loc), vc.getType());
// add the ID if appropriate
if ( rsID != null ) {
@ -273,7 +286,7 @@ public class VariantAnnotatorEngine {
}
} else {
boolean overlapsComp = false;
for ( VariantContext comp : tracker.getValues(dbSet.getKey(), ref.getLocus()) ) {
for ( VariantContext comp : tracker.getValues(dbSet.getKey(), loc) ) {
if ( !comp.isFiltered() && ( !requireStrictAlleleMatch || comp.getAlleles().equals(vc.getAlleles()) ) ) {
overlapsComp = true;
break;
@ -287,9 +300,9 @@ public class VariantAnnotatorEngine {
return vc;
}
private void annotateExpressions(RefMetaDataTracker tracker, ReferenceContext ref, Map<String, Object> infoAnnotations) {
private void annotateExpressions(final RefMetaDataTracker tracker, final GenomeLoc loc, final Map<String, Object> infoAnnotations) {
for ( VAExpression expression : requestedExpressions ) {
Collection<VariantContext> VCs = tracker.getValues(expression.binding, ref.getLocus());
Collection<VariantContext> VCs = tracker.getValues(expression.binding, loc);
if ( VCs.size() == 0 )
continue;

View File

@ -938,7 +938,7 @@ public class DepthOfCoverage extends LocusWalker<Map<DoCOutputType.Partition,Map
int nbases = 0;
for ( byte b : BaseUtils.EXTENDED_BASES ) {
nbases++;
if ( includeDeletions || b != BaseUtils.D ) {
if ( includeDeletions || b != BaseUtils.Base.D.base ) {
s.append((char)b);
s.append(":");
s.append(counts[BaseUtils.extendedBaseToBaseIndex(b)]);

View File

@ -223,7 +223,7 @@ public class DepthOfCoverageStats {
int total = 0;
int[] counts = countsBySample.get(s);
for ( byte base : BaseUtils.EXTENDED_BASES ) {
if ( includeDeletions || ! ( base == BaseUtils.D) ) { // note basesAreEqual assigns TRUE to (N,D) as both have simple index -1
if ( includeDeletions || ! ( base == BaseUtils.Base.D.base) ) { // note basesAreEqual assigns TRUE to (N,D) as both have simple index -1
total += counts[BaseUtils.extendedBaseToBaseIndex(base)];
}
}

View File

@ -99,10 +99,6 @@ public class ConsensusAlleleCounter {
Map<String, AlignmentContext> contexts,
AlignmentContextUtils.ReadOrientation contextType) {
final Map<String, Integer> consensusIndelStrings = countConsensusAlleles(ref, contexts, contextType);
// logger.info("Alleles at " + ref.getLocus());
// for ( Map.Entry<String, Integer> elt : consensusIndelStrings.entrySet() ) {
// logger.info(" " + elt.getValue() + " => " + elt.getKey());
// }
return consensusCountsToAlleles(ref, consensusIndelStrings);
}
@ -138,14 +134,9 @@ public class ConsensusAlleleCounter {
final int nReadsOverall = indelPileup.getNumberOfElements();
if ( nIndelReads == 0 || (nIndelReads / (1.0 * nReadsOverall)) < minFractionInOneSample ) {
// if ( nIndelReads > 0 )
// logger.info("Skipping sample " + sample.getKey() + " with nIndelReads " + nIndelReads + " nReads " + nReadsOverall);
continue;
// } else {
// logger.info("### Keeping sample " + sample.getKey() + " with nIndelReads " + nIndelReads + " nReads " + nReadsOverall);
}
for (PileupElement p : indelPileup) {
final GATKSAMRecord read = ReadClipper.hardClipAdaptorSequence(p.getRead());
if (read == null)
@ -154,17 +145,10 @@ public class ConsensusAlleleCounter {
continue;
}
/* if (DEBUG && p.isIndel()) {
System.out.format("Read: %s, cigar: %s, aln start: %d, aln end: %d, p.len:%d, Type:%s, EventBases:%s\n",
read.getReadName(),read.getCigar().toString(),read.getAlignmentStart(),read.getAlignmentEnd(),
p.getEventLength(),p.getType().toString(), p.getEventBases());
}
*/
String indelString = p.getEventBases();
if ( p.isBeforeInsertion() ) {
// edge case: ignore a deletion immediately preceding an insertion as p.getEventBases() returns null [EB]
if ( indelString == null )
final String insertionBases = p.getBasesOfImmediatelyFollowingInsertion();
// edge case: ignore a deletion immediately preceding an insertion as p.getBasesOfImmediatelyFollowingInsertion() returns null [EB]
if ( insertionBases == null )
continue;
boolean foundKey = false;
@ -182,20 +166,20 @@ public class ConsensusAlleleCounter {
String s = cList.get(k).getFirst();
int cnt = cList.get(k).getSecond();
// case 1: current insertion is prefix of indel in hash map
if (s.startsWith(indelString)) {
if (s.startsWith(insertionBases)) {
cList.set(k,new Pair<String, Integer>(s,cnt+1));
foundKey = true;
}
else if (indelString.startsWith(s)) {
else if (insertionBases.startsWith(s)) {
// case 2: indel stored in hash table is prefix of current insertion
// In this case, new bases are new key.
foundKey = true;
cList.set(k,new Pair<String, Integer>(indelString,cnt+1));
cList.set(k,new Pair<String, Integer>(insertionBases,cnt+1));
}
}
if (!foundKey)
// none of the above: event bases not supported by previous table, so add new key
cList.add(new Pair<String, Integer>(indelString,1));
cList.add(new Pair<String, Integer>(insertionBases,1));
}
else if (read.getAlignmentStart() == loc.getStart()+1) {
@ -203,28 +187,28 @@ public class ConsensusAlleleCounter {
for (int k=0; k < cList.size(); k++) {
String s = cList.get(k).getFirst();
int cnt = cList.get(k).getSecond();
if (s.endsWith(indelString)) {
if (s.endsWith(insertionBases)) {
// case 1: current insertion (indelString) is suffix of indel in hash map (s)
cList.set(k,new Pair<String, Integer>(s,cnt+1));
foundKey = true;
}
else if (indelString.endsWith(s)) {
else if (insertionBases.endsWith(s)) {
// case 2: indel stored in hash table is prefix of current insertion
// In this case, new bases are new key.
foundKey = true;
cList.set(k,new Pair<String, Integer>(indelString,cnt+1));
cList.set(k,new Pair<String, Integer>(insertionBases,cnt+1));
}
}
if (!foundKey)
// none of the above: event bases not supported by previous table, so add new key
cList.add(new Pair<String, Integer>(indelString,1));
cList.add(new Pair<String, Integer>(insertionBases,1));
}
else {
// normal case: insertion somewhere in the middle of a read: add count to arrayList
int cnt = consensusIndelStrings.containsKey(indelString)? consensusIndelStrings.get(indelString):0;
cList.add(new Pair<String, Integer>(indelString,cnt+1));
int cnt = consensusIndelStrings.containsKey(insertionBases)? consensusIndelStrings.get(insertionBases):0;
cList.add(new Pair<String, Integer>(insertionBases,cnt+1));
}
// copy back arrayList into hashMap
@ -234,11 +218,10 @@ public class ConsensusAlleleCounter {
}
}
else if ( p.isBeforeDeletedBase() ) {
indelString = String.format("D%d",p.getEventLength());
int cnt = consensusIndelStrings.containsKey(indelString)? consensusIndelStrings.get(indelString):0;
consensusIndelStrings.put(indelString,cnt+1);
else if ( p.isBeforeDeletionStart() ) {
final String deletionString = String.format("D%d",p.getLengthOfImmediatelyFollowingIndel());
int cnt = consensusIndelStrings.containsKey(deletionString)? consensusIndelStrings.get(deletionString):0;
consensusIndelStrings.put(deletionString,cnt+1);
}
}
}

View File

@ -425,7 +425,7 @@ public class DiploidSNPGenotypeLikelihoods implements Cloneable {
byte qual = p.getQual();
if ( qual > SAMUtils.MAX_PHRED_SCORE )
throw new UserException.MalformedBAM(p.getRead(), String.format("the maximum allowed quality score is %d, but a quality of %d was observed in read %s. Perhaps your BAM incorrectly encodes the quality scores in Sanger format; see http://en.wikipedia.org/wiki/FASTQ_format for more details", SAMUtils.MAX_PHRED_SCORE, qual, p.getRead().getReadName()));
throw new UserException.MisencodedBAM(p.getRead(), "we encountered an extremely high quality score (" + (int)qual + ")");
if ( capBaseQualsAtMappingQual )
qual = (byte)Math.min((int)qual, p.getMappingQual());
if ( (int)qual < minBaseQual )

View File

@ -214,7 +214,7 @@ public class ErrorModel {
if (DEBUG)
System.out.format("PE: base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d Allele:%s RefAllele:%s\n",
pileupElement.getBase(), pileupElement.isBeforeDeletionStart(),
pileupElement.isBeforeInsertion(),pileupElement.getEventBases(),pileupElement.getEventLength(), allele.toString(), refAllele.toString());
pileupElement.isBeforeInsertion(),pileupElement.getBasesOfImmediatelyFollowingInsertion(),pileupElement.getLengthOfImmediatelyFollowingIndel(), allele.toString(), refAllele.toString());
//pileupElement.
// if test allele is ref, any base mismatch, or any insertion/deletion at start of pileup count as mismatch
@ -238,11 +238,11 @@ public class ErrorModel {
// for non-ref alleles,
byte[] alleleBases = allele.getBases();
int eventLength = alleleBases.length - refAllele.getBases().length;
if (eventLength < 0 && pileupElement.isBeforeDeletionStart() && pileupElement.getEventLength() == -eventLength)
if (eventLength < 0 && pileupElement.isBeforeDeletionStart() && pileupElement.getLengthOfImmediatelyFollowingIndel() == -eventLength)
return true;
if (eventLength > 0 && pileupElement.isBeforeInsertion() &&
Arrays.equals(pileupElement.getEventBases().getBytes(),Arrays.copyOfRange(alleleBases,1,alleleBases.length))) // allele contains ref byte, but pileupElement's event bases doesn't
Arrays.equals(pileupElement.getBasesOfImmediatelyFollowingInsertion().getBytes(),Arrays.copyOfRange(alleleBases,1,alleleBases.length))) // allele contains ref byte, but pileupElement's event bases doesn't
return true;
return false;

View File

@ -210,7 +210,7 @@ public class GeneralPloidyIndelGenotypeLikelihoods extends GeneralPloidyGenotype
// count number of elements in pileup
for (PileupElement elt : pileup) {
if (VERBOSE)
System.out.format("base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d\n",elt.getBase(), elt.isBeforeDeletionStart(),elt.isBeforeInsertion(),elt.getEventBases(),elt.getEventLength());
System.out.format("base:%s isNextToDel:%b isNextToIns:%b eventBases:%s eventLength:%d\n",elt.getBase(), elt.isBeforeDeletionStart(),elt.isBeforeInsertion(),elt.getBasesOfImmediatelyFollowingInsertion(),elt.getLengthOfImmediatelyFollowingIndel());
int idx =0;
for (Allele allele : alleles) {
int cnt = numSeenBases.get(idx);

View File

@ -323,22 +323,12 @@ public class GeneralPloidySNPGenotypeLikelihoods extends GeneralPloidyGenotypeLi
public ReadBackedPileup createBAQedPileup( final ReadBackedPileup pileup ) {
final List<PileupElement> BAQedElements = new ArrayList<PileupElement>();
for( final PileupElement PE : pileup ) {
final PileupElement newPE = new BAQedPileupElement( PE );
final PileupElement newPE = new SNPGenotypeLikelihoodsCalculationModel.BAQedPileupElement( PE );
BAQedElements.add( newPE );
}
return new ReadBackedPileupImpl( pileup.getLocation(), BAQedElements );
}
public class BAQedPileupElement extends PileupElement {
public BAQedPileupElement( final PileupElement PE ) {
super(PE.getRead(), PE.getOffset(), PE.isDeletion(), PE.isBeforeDeletedBase(), PE.isAfterDeletedBase(), PE.isBeforeInsertion(), PE.isAfterInsertion(), PE.isNextToSoftClip());
}
@Override
public byte getQual( final int offset ) { return BAQ.calcBAQFromTag(getRead(), offset, true); }
}
/**
* Helper function that returns the phred-scaled base quality score we should use for calculating
* likelihoods for a pileup element. May return 0 to indicate that the observation is bad, and may

View File

@ -252,7 +252,7 @@ public class IndelGenotypeLikelihoodsCalculationModel extends GenotypeLikelihood
protected int getFilteredDepth(ReadBackedPileup pileup) {
int count = 0;
for (PileupElement p : pileup) {
if (p.isDeletion() || p.isInsertionAtBeginningOfRead() || BaseUtils.isRegularBase(p.getBase()))
if (p.isDeletion() || BaseUtils.isRegularBase(p.getBase()))
count += p.getRepresentativeCount();
}

View File

@ -237,11 +237,16 @@ public class SNPGenotypeLikelihoodsCalculationModel extends GenotypeLikelihoodsC
public static class BAQedPileupElement extends PileupElement {
public BAQedPileupElement( final PileupElement PE ) {
super(PE.getRead(), PE.getOffset(), PE.isDeletion(), PE.isBeforeDeletedBase(), PE.isAfterDeletedBase(), PE.isBeforeInsertion(), PE.isAfterInsertion(), PE.isNextToSoftClip());
super(PE);
}
@Override
public byte getQual( final int offset ) { return BAQ.calcBAQFromTag(getRead(), offset, true); }
public byte getQual() {
if ( isDeletion() )
return super.getQual();
else
return BAQ.calcBAQFromTag(getRead(), offset, true);
}
}
private static class SampleGenotypeData {

View File

@ -47,6 +47,7 @@
package org.broadinstitute.sting.gatk.walkers.haplotypecaller;
import com.google.java.contract.Ensures;
import net.sf.samtools.*;
import org.broadinstitute.sting.commandline.*;
import org.broadinstitute.sting.gatk.CommandLineGATK;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
@ -55,7 +56,9 @@ import org.broadinstitute.sting.gatk.arguments.StandardCallerArgumentCollection;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.AlignmentContextUtils;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.downsampling.DownsampleType;
import org.broadinstitute.sting.gatk.filters.BadMateFilter;
import org.broadinstitute.sting.gatk.io.StingSAMFileWriter;
import org.broadinstitute.sting.gatk.iterators.ReadTransformer;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.*;
@ -66,6 +69,7 @@ import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedArgumentCollection
import org.broadinstitute.sting.gatk.walkers.genotyper.UnifiedGenotyperEngine;
import org.broadinstitute.sting.gatk.walkers.genotyper.VariantCallContext;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.activeregion.ActiveRegion;
import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState;
import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult;
import org.broadinstitute.sting.utils.clipping.ReadClipper;
@ -129,6 +133,7 @@ import java.util.*;
@PartitionBy(PartitionType.LOCUS)
@BAQMode(ApplicationTime = ReadTransformer.ApplicationTime.FORBIDDEN)
@ActiveRegionExtension(extension=65, maxRegion=300)
//@Downsample(by= DownsampleType.BY_SAMPLE, toCoverage=5)
public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implements AnnotatorCompatible {
/**
@ -140,6 +145,17 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
@Output(fullName="graphOutput", shortName="graph", doc="File to which debug assembly graph information should be written", required = false)
protected PrintStream graphWriter = null;
/**
* The assembled haplotypes will be written as BAM to this file if requested. Really for debugging purposes only. Note that the output here
* does not include uninformative reads so that not every input read is emitted to the bam.
*/
@Hidden
@Output(fullName="bamOutput", shortName="bam", doc="File to which assembled haplotypes should be written", required = false)
protected StingSAMFileWriter bamWriter = null;
private SAMFileHeader bamHeader = null;
private long uniqueNameCounter = 1;
private final static String readGroupId = "ArtificialHaplotype";
/**
* The PairHMM implementation to use for genotype likelihood calculations. The various implementations balance a tradeoff of accuracy and runtime.
*/
@ -168,6 +184,16 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
@Argument(fullName="downsampleRegion", shortName="dr", doc="coverage, per-sample, to downsample each active region to", required = false)
protected int DOWNSAMPLE_PER_SAMPLE_PER_REGION = 1000;
/**
* If this flag is provided, the haplotype caller will include unmapped reads in the assembly and calling
* when these reads occur in the region being analyzed. Typically, for paired end analyses, one pair of the
* read can map, but if its pair is too divergent then it may be unmapped and placed next to its mate, taking
* the mates contig and alignment start. If this flag is provided the haplotype caller will see such reads,
* and may make use of them in assembly and calling, where possible.
*/
@Argument(fullName="includeUmappedReads", shortName="unmapped", doc="If provided, unmapped reads with chromosomal coordinates (i.e., those placed to their maps) will be included in the assembly and calling", required = false)
protected boolean includeUnmappedReads = false;
@Argument(fullName="useAllelesTrigger", shortName="allelesTrigger", doc = "If specified, use additional trigger on variants found in an external alleles file", required=false)
protected boolean USE_ALLELES_TRIGGER = false;
@ -175,6 +201,10 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
@Argument(fullName="useFilteredReadsForAnnotations", shortName="useFilteredReadsForAnnotations", doc = "If specified, use the contamination-filtered read maps for the purposes of annotating variants", required=false)
protected boolean USE_FILTERED_READ_MAP_FOR_ANNOTATIONS = false;
@Hidden
@Argument(fullName="justDetermineActiveRegions", shortName="justDetermineActiveRegions", doc = "If specified, the HC won't actually do any assembly or calling, it'll just run the upfront active region determination code. Useful for benchmarking and scalability testing", required=false)
protected boolean justDetermineActiveRegions = false;
/**
* rsIDs from this file are used to populate the ID column of the output. Also, the DB INFO flag will be set when appropriate.
* dbSNP is not used in any way for the calculations themselves.
@ -236,6 +266,8 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
// the genotyping engine
private GenotypingEngine genotypingEngine = null;
private VariantAnnotatorEngine annotationEngine = null;
// fasta reference reader to supplement the edges of the reference sequence
private CachingIndexedFastaSequenceFile referenceReader;
@ -280,7 +312,7 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
UG_engine_simple_genotyper = new UnifiedGenotyperEngine(getToolkit(), simpleUAC, logger, null, null, samples, GATKVariantContextUtils.DEFAULT_PLOIDY);
// initialize the output VCF header
final VariantAnnotatorEngine annotationEngine = new VariantAnnotatorEngine(Arrays.asList(annotationClassesToUse), annotationsToUse, annotationsToExclude, this, getToolkit());
annotationEngine = new VariantAnnotatorEngine(Arrays.asList(annotationClassesToUse), annotationsToUse, annotationsToExclude, this, getToolkit());
Set<VCFHeaderLine> headerInfo = new HashSet<VCFHeaderLine>();
@ -314,6 +346,9 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
assemblyEngine = new SimpleDeBruijnAssembler( DEBUG, graphWriter, minKmer );
likelihoodCalculationEngine = new LikelihoodCalculationEngine( (byte)gcpHMM, DEBUG, pairHMM );
genotypingEngine = new GenotypingEngine( DEBUG, annotationEngine, USE_FILTERED_READ_MAP_FOR_ANNOTATIONS );
if ( bamWriter != null )
setupBamWriter();
}
//---------------------------------------------------------------------------------------------------------------
@ -329,11 +364,20 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
// enable non primary and extended reads in the active region
@Override
public EnumSet<ActiveRegionReadState> desiredReadStates() {
return EnumSet.of(
ActiveRegionReadState.PRIMARY,
ActiveRegionReadState.NONPRIMARY,
ActiveRegionReadState.EXTENDED
);
if ( includeUnmappedReads ) {
throw new UserException.BadArgumentValue("includeUmappedReads", "is not yet functional");
// return EnumSet.of(
// ActiveRegionReadState.PRIMARY,
// ActiveRegionReadState.NONPRIMARY,
// ActiveRegionReadState.EXTENDED,
// ActiveRegionReadState.UNMAPPED
// );
} else
return EnumSet.of(
ActiveRegionReadState.PRIMARY,
ActiveRegionReadState.NONPRIMARY,
ActiveRegionReadState.EXTENDED
);
}
@Override
@ -371,7 +415,7 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
final byte qual = p.getQual();
if( p.isDeletion() || qual > (byte) 18) {
int AA = 0; final int AB = 1; int BB = 2;
if( p.getBase() != ref.getBase() || p.isDeletion() || p.isBeforeDeletedBase() || p.isAfterDeletedBase() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip() ) {
if( p.getBase() != ref.getBase() || p.isDeletion() || p.isBeforeDeletionStart() || p.isAfterDeletionEnd() || p.isBeforeInsertion() || p.isAfterInsertion() || p.isNextToSoftClip() ) {
AA = 2;
BB = 0;
if( p.isNextToSoftClip() ) {
@ -402,7 +446,10 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
//---------------------------------------------------------------------------------------------------------------
@Override
public Integer map( final org.broadinstitute.sting.utils.activeregion.ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker ) {
public Integer map( final ActiveRegion activeRegion, final RefMetaDataTracker metaDataTracker ) {
if ( justDetermineActiveRegions )
// we're benchmarking ART and/or the active region determination code in the HC, just leave without doing any work
return 1;
final ArrayList<VariantContext> activeAllelesToGenotype = new ArrayList<VariantContext>();
@ -420,8 +467,7 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
if( UG_engine.getUAC().GenotypingMode == GenotypeLikelihoodsCalculationModel.GENOTYPING_MODE.GENOTYPE_GIVEN_ALLELES && activeAllelesToGenotype.isEmpty() ) { return 0; } // No alleles found in this region so nothing to do!
finalizeActiveRegion( activeRegion ); // merge overlapping fragments, clip adapter and low qual tails
final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader)); // Create the reference haplotype which is the bases from the reference that make up the active region
referenceHaplotype.setIsReference(true);
final Haplotype referenceHaplotype = new Haplotype(activeRegion.getActiveRegionReference(referenceReader), true); // Create the reference haplotype which is the bases from the reference that make up the active region
final byte[] fullReferenceWithPadding = activeRegion.getFullReference(referenceReader, REFERENCE_PADDING);
//int PRUNE_FACTOR = Math.max(MIN_PRUNE_FACTOR, determinePruneFactorFromCoverage( activeRegion ));
final ArrayList<Haplotype> haplotypes = assemblyEngine.runLocalAssembly( activeRegion, referenceHaplotype, fullReferenceWithPadding, getPaddedLoc(activeRegion), MIN_PRUNE_FACTOR, activeAllelesToGenotype );
@ -452,9 +498,31 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
activeRegion.getLocation(),
getToolkit().getGenomeLocParser(),
activeAllelesToGenotype ) ) {
annotationEngine.annotateDBs(metaDataTracker, getToolkit().getGenomeLocParser().createGenomeLoc(call), call);
vcfWriter.add( call );
}
if ( bamWriter != null ) {
// write the haplotypes to the bam
final GenomeLoc paddedRefLoc = getPaddedLoc(activeRegion);
for ( Haplotype haplotype : haplotypes )
writeHaplotype(haplotype, paddedRefLoc, bestHaplotypes.contains(haplotype));
// we need to remap the Alleles back to the Haplotypes; inefficient but unfortunately this is a requirement currently
final Map<Allele, Haplotype> alleleToHaplotypeMap = new HashMap<Allele, Haplotype>(haplotypes.size());
for ( final Haplotype haplotype : haplotypes )
alleleToHaplotypeMap.put(Allele.create(haplotype.getBases()), haplotype);
// next, output the interesting reads for each sample aligned against the appropriate haplotype
for ( final PerReadAlleleLikelihoodMap readAlleleLikelihoodMap : stratifiedReadMap.values() ) {
for ( Map.Entry<GATKSAMRecord, Map<Allele, Double>> entry : readAlleleLikelihoodMap.getLikelihoodReadMap().entrySet() ) {
final Allele bestAllele = PerReadAlleleLikelihoodMap.getMostLikelyAllele(entry.getValue());
if ( bestAllele != Allele.NO_CALL )
writeReadAgainstHaplotype(entry.getKey(), alleleToHaplotypeMap.get(bestAllele), paddedRefLoc.getStart());
}
}
}
if( DEBUG ) { System.out.println("----------------------------------------------------------------------------------"); }
return 1; // One active region was processed during this map call
@ -548,6 +616,95 @@ public class HaplotypeCaller extends ActiveRegionWalker<Integer, Integer> implem
return returnMap;
}
private void setupBamWriter() {
// prepare the bam header
bamHeader = new SAMFileHeader();
bamHeader.setSequenceDictionary(getToolkit().getSAMFileHeader().getSequenceDictionary());
bamHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate);
// include the original read groups plus a new artificial one for the haplotypes
final List<SAMReadGroupRecord> readGroups = new ArrayList<SAMReadGroupRecord>(getToolkit().getSAMFileHeader().getReadGroups());
final SAMReadGroupRecord rg = new SAMReadGroupRecord(readGroupId);
rg.setSample("HC");
rg.setSequencingCenter("BI");
readGroups.add(rg);
bamHeader.setReadGroups(readGroups);
bamWriter.setPresorted(false);
bamWriter.writeHeader(bamHeader);
}
private void writeHaplotype(final Haplotype haplotype, final GenomeLoc paddedRefLoc, final boolean isAmongBestHaplotypes) {
final GATKSAMRecord record = new GATKSAMRecord(bamHeader);
record.setReadBases(haplotype.getBases());
record.setAlignmentStart(paddedRefLoc.getStart() + haplotype.getAlignmentStartHapwrtRef());
record.setBaseQualities(Utils.dupBytes((byte) '!', haplotype.getBases().length));
record.setCigar(haplotype.getCigar());
record.setMappingQuality(isAmongBestHaplotypes ? 60 : 0);
record.setReadName("HC" + uniqueNameCounter++);
record.setReadUnmappedFlag(false);
record.setReferenceIndex(paddedRefLoc.getContigIndex());
record.setAttribute(SAMTag.RG.toString(), readGroupId);
record.setFlags(16);
bamWriter.addAlignment(record);
}
private void writeReadAgainstHaplotype(final GATKSAMRecord read, final Haplotype haplotype, final int referenceStart) {
final SWPairwiseAlignment swPairwiseAlignment = new SWPairwiseAlignment(haplotype.getBases(), read.getReadBases(), 5.0, -10.0, -22.0, -1.2);
final int readStartOnHaplotype = swPairwiseAlignment.getAlignmentStart2wrt1();
final int readStartOnReference = referenceStart + haplotype.getAlignmentStartHapwrtRef() + readStartOnHaplotype;
read.setAlignmentStart(readStartOnReference);
final Cigar cigar = generateReadCigarFromHaplotype(read, readStartOnHaplotype, haplotype.getCigar());
read.setCigar(cigar);
bamWriter.addAlignment(read);
}
private Cigar generateReadCigarFromHaplotype(final GATKSAMRecord read, final int readStartOnHaplotype, final Cigar haplotypeCigar) {
int currentReadPos = 0;
int currentHapPos = 0;
final List<CigarElement> readCigarElements = new ArrayList<CigarElement>();
for ( final CigarElement cigarElement : haplotypeCigar.getCigarElements() ) {
if ( cigarElement.getOperator() == CigarOperator.D ) {
if ( currentReadPos > 0 )
readCigarElements.add(cigarElement);
} else if ( cigarElement.getOperator() == CigarOperator.M || cigarElement.getOperator() == CigarOperator.I ) {
final int elementLength = cigarElement.getLength();
final int nextReadPos = currentReadPos + elementLength;
final int nextHapPos = currentHapPos + elementLength;
// do we want this element?
if ( currentReadPos > 0 ) {
// do we want the entire element?
if ( nextReadPos < read.getReadLength() ) {
readCigarElements.add(cigarElement);
currentReadPos = nextReadPos;
}
// otherwise, we can finish up and return the cigar
else {
readCigarElements.add(new CigarElement(read.getReadLength() - currentReadPos, cigarElement.getOperator()));
return new Cigar(readCigarElements);
}
}
// do we want part of the element to start?
else if ( currentReadPos == 0 && nextHapPos > readStartOnHaplotype ) {
currentReadPos = Math.min(nextHapPos - readStartOnHaplotype, read.getReadLength());
readCigarElements.add(new CigarElement(currentReadPos, cigarElement.getOperator()));
}
currentHapPos = nextHapPos;
}
}
return new Cigar(readCigarElements);
}
/*
private int determinePruneFactorFromCoverage( final ActiveRegion activeRegion ) {
final ArrayList<Integer> readLengthDistribution = new ArrayList<Integer>();

View File

@ -124,9 +124,14 @@ public class LikelihoodCalculationEngine {
}
private PerReadAlleleLikelihoodMap computeReadLikelihoods( final ArrayList<Haplotype> haplotypes, final ArrayList<GATKSAMRecord> reads) {
// first, a little set up to get copies of the Haplotypes that are Alleles (more efficient than creating them each time)
final int numHaplotypes = haplotypes.size();
final Map<Haplotype, Allele> alleleVersions = new HashMap<Haplotype, Allele>(numHaplotypes);
for ( final Haplotype haplotype : haplotypes ) {
alleleVersions.put(haplotype, Allele.create(haplotype.getBases()));
}
final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap = new PerReadAlleleLikelihoodMap();
final int numHaplotypes = haplotypes.size();
for( final GATKSAMRecord read : reads ) {
final byte[] overallGCP = new byte[read.getReadLength()];
Arrays.fill( overallGCP, constantGCP ); // Is there a way to derive empirical estimates for this from the data?
@ -138,20 +143,17 @@ public class LikelihoodCalculationEngine {
readQuals[kkk] = ( readQuals[kkk] > (byte) read.getMappingQuality() ? (byte) read.getMappingQuality() : readQuals[kkk] ); // cap base quality by mapping quality
//readQuals[kkk] = ( readQuals[kkk] > readInsQuals[kkk] ? readInsQuals[kkk] : readQuals[kkk] ); // cap base quality by base insertion quality, needs to be evaluated
//readQuals[kkk] = ( readQuals[kkk] > readDelQuals[kkk] ? readDelQuals[kkk] : readQuals[kkk] ); // cap base quality by base deletion quality, needs to be evaluated
// TODO -- why is Q18 hard-coded here???
readQuals[kkk] = ( readQuals[kkk] < (byte) 18 ? QualityUtils.MIN_USABLE_Q_SCORE : readQuals[kkk] );
}
for( int jjj = 0; jjj < numHaplotypes; jjj++ ) {
final Haplotype haplotype = haplotypes.get(jjj);
// TODO -- need to test against a reference/position with non-standard bases
//if ( !Allele.acceptableAlleleBases(haplotype.getBases(), false) )
// continue;
final int haplotypeStart = ( previousHaplotypeSeen == null ? 0 : computeFirstDifferingPosition(haplotype.getBases(), previousHaplotypeSeen.getBases()) );
previousHaplotypeSeen = haplotype;
perReadAlleleLikelihoodMap.add(read, Allele.create(haplotype.getBases()),
perReadAlleleLikelihoodMap.add(read, alleleVersions.get(haplotype),
pairHMM.computeReadLikelihoodGivenHaplotypeLog10(haplotype.getBases(), read.getReadBases(),
readQuals, readInsQuals, readDelQuals, overallGCP, haplotypeStart, jjj == 0));
}

View File

@ -338,7 +338,7 @@ public class SimpleDeBruijnAssembler extends LocalAssemblyEngine {
for( final DefaultDirectedGraph<DeBruijnVertex, DeBruijnEdge> graph : graphs ) {
for ( final KBestPaths.Path path : KBestPaths.getKBestPaths(graph, NUM_BEST_PATHS_PER_KMER_GRAPH) ) {
final Haplotype h = new Haplotype( path.getBases( graph ), path.getScore() );
final Haplotype h = new Haplotype( path.getBases( graph ) );
if( addHaplotype( h, fullReferenceWithPadding, returnHaplotypes, activeRegionStart, activeRegionStop, false ) ) {
// for GGA mode, add the desired allele into the haplotype if it isn't already present

View File

@ -376,7 +376,7 @@ public class ValidationAmplicons extends RodWalker<Integer,Integer> {
if ( lowerCaseSNPs ) {
sequence.append(Character.toLowerCase((char) ref.getBase()));
} else {
sequence.append((char) BaseUtils.N);
sequence.append((char) BaseUtils.Base.N.base);
}
rawSequence.append(Character.toUpperCase((char) ref.getBase()));

View File

@ -272,22 +272,7 @@ public class ConcordanceMetrics {
@Requires({"evalVC != null","truthVC != null"})
private SiteConcordanceType getMatchType(VariantContext evalVC, VariantContext truthVC) {
if ( evalVC.isMonomorphicInSamples() )
return SiteConcordanceType.TRUTH_ONLY;
if ( truthVC.isMonomorphicInSamples() )
return SiteConcordanceType.EVAL_ONLY;
boolean evalSusbsetTruth = VariantContextUtils.allelesAreSubset(evalVC,truthVC);
boolean truthSubsetEval = VariantContextUtils.allelesAreSubset(truthVC,evalVC);
if ( evalSusbsetTruth && truthSubsetEval )
return SiteConcordanceType.ALLELES_MATCH;
else if ( evalSusbsetTruth )
return SiteConcordanceType.EVAL_SUBSET_TRUTH;
else if ( truthSubsetEval )
return SiteConcordanceType.EVAL_SUPERSET_TRUTH;
return SiteConcordanceType.ALLELES_DO_NOT_MATCH;
return SiteConcordanceType.getConcordanceType(evalVC,truthVC);
}
public int[] getSiteConcordance() {
@ -305,6 +290,27 @@ public class ConcordanceMetrics {
EVAL_SUBSET_TRUTH,
ALLELES_DO_NOT_MATCH,
EVAL_ONLY,
TRUTH_ONLY
TRUTH_ONLY;
public static SiteConcordanceType getConcordanceType(VariantContext eval, VariantContext truth) {
if ( eval.isMonomorphicInSamples() )
return TRUTH_ONLY;
if ( truth.isMonomorphicInSamples() )
return EVAL_ONLY;
boolean evalSubsetTruth = VariantContextUtils.allelesAreSubset(eval,truth);
boolean truthSubsetEval = VariantContextUtils.allelesAreSubset(eval,truth);
if ( evalSubsetTruth && truthSubsetEval )
return ALLELES_MATCH;
if ( evalSubsetTruth )
return EVAL_SUBSET_TRUTH;
if ( truthSubsetEval )
return EVAL_SUPERSET_TRUTH;
return ALLELES_DO_NOT_MATCH;
}
}
}

View File

@ -64,7 +64,7 @@ import java.util.*;
/**
* A simple walker for performing genotype concordance calculations between two callsets
*/
public class GenotypeConcordance extends RodWalker<Pair<VariantContext,VariantContext>,ConcordanceMetrics> {
public class GenotypeConcordance extends RodWalker<List<Pair<VariantContext,VariantContext>>,ConcordanceMetrics> {
@Input(fullName="eval",shortName="eval",doc="The variants and genotypes to evaluate",required=true)
RodBinding<VariantContext> evalBinding;
@ -75,21 +75,41 @@ public class GenotypeConcordance extends RodWalker<Pair<VariantContext,VariantCo
@Argument(fullName="ignoreFilters",doc="Filters will be ignored",required=false)
boolean ignoreFilters = false;
@Argument(shortName="gfe", fullName="genotypeFilterExpressionEval", doc="One or more criteria to use to set EVAL genotypes to no-call. "+
"These genotype-level filters are only applied to the EVAL rod.", required=false)
public ArrayList<String> genotypeFilterExpressionsEval = new ArrayList<String>();
@Argument(shortName="gfc", fullName="genotypeFilterExpressionComp", doc="One or more criteria to use to set COMP genotypes to no-call. "+
"These genotype-level filters are only applied to the COMP rod.", required=false)
public ArrayList<String> genotypeFilterExpressionsComp = new ArrayList<String>();
@Argument(shortName="moltenize",fullName="moltenize",doc="Molten rather than tabular output")
public boolean moltenize = false;
@Output
PrintStream out;
List<String> evalSamples;
List<String> compSamples;
private List<String> evalSamples;
private List<String> compSamples;
private List<VariantContextUtils.JexlVCMatchExp> evalJexls = null;
private List<VariantContextUtils.JexlVCMatchExp> compJexls = null;
// todo -- integration test coverage
// todo -- deal with occurrences like:
// Eval: 20 4000 A C
// Eval: 20 4000 A AC
// Comp: 20 4000 A C
// currently this results in a warning and skipping
// todo -- extend to multiple eval, multiple comp
// todo -- table with "proportion of overlapping sites" (not just eval/comp margins)
// todo -- table with "proportion of overlapping sites" (not just eval/comp margins) [e.g. drop no-calls]
// (this will break all the integration tests of course, due to new formatting)
public void initialize() {
evalJexls = initializeJexl(genotypeFilterExpressionsEval);
compJexls = initializeJexl(genotypeFilterExpressionsComp);
}
private List<VariantContextUtils.JexlVCMatchExp> initializeJexl(ArrayList<String> genotypeFilterExpressions) {
ArrayList<String> dummyNames = new ArrayList<String>(genotypeFilterExpressions.size());
int expCount = 1;
for ( String exp : genotypeFilterExpressions ) {
dummyNames.add(String.format("gfe%d",expCount++));
}
return VariantContextUtils.initializeMatchExps(dummyNames, genotypeFilterExpressions);
}
public ConcordanceMetrics reduceInit() {
Map<String,VCFHeader> headerMap = GATKVCFUtils.getVCFHeadersFromRods(getToolkit(), Arrays.asList(evalBinding,compBinding));
@ -101,120 +121,320 @@ public class GenotypeConcordance extends RodWalker<Pair<VariantContext,VariantCo
}
public Pair<VariantContext,VariantContext> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
Pair<VariantContext,VariantContext> evalCompPair = null;
public List<Pair<VariantContext,VariantContext>> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
List<Pair<VariantContext,VariantContext>> evalCompPair = new ArrayList<Pair<VariantContext,VariantContext>>(3);
if ( tracker != null && (
tracker.getValues(evalBinding,ref.getLocus()).size() > 0 ||
tracker.getValues(compBinding,ref.getLocus()).size() > 0 ) ) {
tracker.getValues(compBinding,ref.getLocus()).size() > 0 ) ) {
List<VariantContext> eval = tracker.getValues(evalBinding,ref.getLocus());
List<VariantContext> comp = tracker.getValues(compBinding,ref.getLocus());
if ( eval.size() > 1 || comp.size() > 1 ) {
logger.warn("Eval or Comp Rod at position "+ref.getLocus().toString()+" has multiple records. Site will be skipped.");
return evalCompPair;
if ( noDuplicateTypes(eval) && noDuplicateTypes(comp) ) {
logger.info("Eval or Comp Rod at position " + ref.getLocus().toString() + " has multiple records. Resolving.");
evalCompPair = resolveMultipleRecords(eval,comp);
} else {
logger.warn("Eval or Comp Rod at position "+ref.getLocus().toString()+" has multiple records of the same type. This locus will be skipped.");
}
} else {
// if a rod is missing, explicitly create a variant context with 'missing' genotypes. Slow, but correct.
// note that if there is no eval rod there must be a comp rod, and also the reverse
VariantContext evalContext = eval.size() == 1 ? eval.get(0) : createEmptyContext(comp.get(0),evalSamples);
VariantContext compContext = comp.size() == 1 ? comp.get(0) : createEmptyContext(eval.get(0),compSamples);
evalContext = filterGenotypes(evalContext,ignoreFilters,evalJexls);
compContext = filterGenotypes(compContext,ignoreFilters,compJexls);
evalCompPair.add(new Pair<VariantContext, VariantContext>(evalContext,compContext));
}
// if a rod is missing, explicitly create a variant context with 'missing' genotypes. Slow, but correct.
// note that if there is no eval rod there must be a comp rod, and also the reverse
VariantContext evalContext = eval.size() == 1 ? eval.get(0) : createEmptyContext(ref,comp.get(0),evalSamples);
VariantContext compContext = comp.size() == 1 ? comp.get(0) : createEmptyContext(ref,eval.get(0),compSamples);
evalContext = filterGenotypes(evalContext,ignoreFilters);
compContext = filterGenotypes(compContext,ignoreFilters);
evalCompPair = new Pair<VariantContext, VariantContext>(evalContext,compContext);
}
return evalCompPair;
}
public ConcordanceMetrics reduce(Pair<VariantContext,VariantContext> evalComp, ConcordanceMetrics metrics) {
if ( evalComp != null )
private boolean noDuplicateTypes(List<VariantContext> vcList) {
HashSet<VariantContext.Type> types = new HashSet<VariantContext.Type>(vcList.size());
for ( VariantContext vc : vcList ) {
VariantContext.Type type = vc.getType();
if ( types.contains(type) )
return false;
types.add(type);
}
return true;
}
/**
* The point of this method is to match up pairs of evals and comps by their type (or alternate alleles for mixed).
* Basically multiple records could exist for a site such as:
* Eval: 20 4000 A C
* Eval: 20 4000 A AC
* Comp: 20 4000 A C
* So for each eval, loop through the comps. If the eval alleles (non-emptily) intersect the comp alleles, pair them up and remove
* that comp records. Continue until we're out of evals or comps. This is n^2, but should rarely actually happen.
*
* The remaining unpaired records get paird with an empty contexts. So in the example above we'd get a list of:
* 1 - (20,4000,A/C | 20,4000,A/C)
* 2 - (20,4000,A/AC | Empty )
* @param evalList - list of eval variant contexts
* @param compList - list of comp variant contexts
* @return resolved pairs of the input lists
*/
private List<Pair<VariantContext,VariantContext>> resolveMultipleRecords(List<VariantContext> evalList, List<VariantContext> compList) {
List<Pair<VariantContext,VariantContext>> resolvedPairs = new ArrayList<Pair<VariantContext,VariantContext>>(evalList.size()+compList.size()); // oversized but w/e
List<VariantContext> pairedEval = new ArrayList<VariantContext>(evalList.size());
for ( VariantContext eval : evalList ) {
VariantContext.Type evalType = eval.getType();
Set<Allele> evalAlleles = new HashSet<Allele>(eval.getAlternateAlleles());
VariantContext pairedComp = null;
for ( VariantContext comp : compList ) {
if ( evalType.equals(comp.getType()) ) {
pairedComp = comp;
break;
} else if ( eval.isMixed() || comp.isMixed() ) {
for ( Allele compAllele : comp.getAlternateAlleles() ) {
if ( evalAlleles.contains(compAllele) ) {
pairedComp = comp;
break;
}
}
}
}
if ( pairedComp != null ) {
compList.remove(pairedComp);
resolvedPairs.add(new Pair<VariantContext, VariantContext>(filterGenotypes(eval,ignoreFilters,evalJexls),filterGenotypes(pairedComp,ignoreFilters,compJexls)));
pairedEval.add(eval);
if ( compList.size() < 1 )
break;
}
}
evalList.removeAll(pairedEval);
for ( VariantContext unpairedEval : evalList ) {
resolvedPairs.add(new Pair<VariantContext, VariantContext>(filterGenotypes(unpairedEval,ignoreFilters,evalJexls),createEmptyContext(unpairedEval,compSamples)));
}
for ( VariantContext unpairedComp : compList ) {
resolvedPairs.add(new Pair<VariantContext, VariantContext>(createEmptyContext(unpairedComp,evalSamples),filterGenotypes(unpairedComp,ignoreFilters,compJexls)));
}
return resolvedPairs;
}
public ConcordanceMetrics reduce(List<Pair<VariantContext,VariantContext>> evalCompList, ConcordanceMetrics metrics) {
for ( Pair<VariantContext,VariantContext> evalComp : evalCompList)
metrics.update(evalComp.getFirst(),evalComp.getSecond());
return metrics;
}
private static double repairNaN(double d) {
if ( Double.isNaN(d) ) {
return 0.0;
}
return d;
}
public void onTraversalDone(ConcordanceMetrics metrics) {
// todo -- this is over 200 lines of code just to format the output and could use some serious cleanup
GATKReport report = new GATKReport();
GATKReportTable concordanceCounts = new GATKReportTable("GenotypeConcordance_Counts","Per-sample concordance tables: comparison counts",2+GenotypeType.values().length*GenotypeType.values().length);
GATKReportTable concordanceEvalProportions = new GATKReportTable("GenotypeConcordance_EvalProportions", "Per-sample concordance tables: proportions of genotypes called in eval",2+GenotypeType.values().length*GenotypeType.values().length);
GATKReportTable concordanceCompProportions = new GATKReportTable("GenotypeConcordance_CompProportions", "Per-sample concordance tables: proportions of genotypes called in comp",2+GenotypeType.values().length*GenotypeType.values().length);
GATKReportTable concordanceSummary = new GATKReportTable("GenotypeConcordance_Summary","Per-sample summary statistics: NRS and NRD",2);
GATKReportTable siteConcordance = new GATKReportTable("SiteConcordance_Summary","Site-level summary statistics",ConcordanceMetrics.SiteConcordanceType.values().length);
concordanceCompProportions.addColumn("Sample","%s");
concordanceCounts.addColumn("Sample","%s");
concordanceEvalProportions.addColumn("Sample","%s");
concordanceSummary.addColumn("Sample","%s");
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String colKey = String.format("%s_%s", evalType.toString(), compType.toString());
concordanceCounts.addColumn(colKey,"%d");
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR)
concordanceEvalProportions.addColumn(colKey,"%.3f");
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF )
concordanceCompProportions.addColumn(colKey,"%.3f");
}
}
concordanceEvalProportions.addColumn("Mismatching_Alleles","%.3f");
concordanceCompProportions.addColumn("Mismatching_Alleles","%.3f");
concordanceCounts.addColumn("Mismatching_Alleles","%d");
concordanceSummary.addColumn("Non-Reference Sensitivity","%.3f");
concordanceSummary.addColumn("Non-Reference Discrepancy","%.3f");
for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) {
siteConcordance.addColumn(type.toString(),"%d");
}
if ( moltenize ) {
concordanceCompProportions.addColumn("Sample","%s");
concordanceCounts.addColumn("Sample","%s");
concordanceEvalProportions.addColumn("Sample","%s");
concordanceSummary.addColumn("Sample","%s");
for ( Map.Entry<String,ConcordanceMetrics.GenotypeConcordanceTable> entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) {
ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue();
concordanceEvalProportions.set(entry.getKey(),"Sample",entry.getKey());
concordanceCompProportions.set(entry.getKey(),"Sample",entry.getKey());
concordanceCounts.set(entry.getKey(),"Sample",entry.getKey());
concordanceCompProportions.addColumn("Eval_Genotype","%s");
concordanceCounts.addColumn("Eval_Genotype","%s");
concordanceEvalProportions.addColumn("Eval_Genotype","%s");
concordanceSummary.addColumn("Non-Reference_Discrepancy","%.3f");
concordanceCompProportions.addColumn("Comp_Genotype","%s");
concordanceCounts.addColumn("Comp_Genotype","%s");
concordanceEvalProportions.addColumn("Comp_Genotype","%s");
concordanceSummary.addColumn("Non-Reference_Sensitivity","%.3f");
concordanceCompProportions.addColumn("Proportion","%.3f");
concordanceCounts.addColumn("Count","%d");
concordanceEvalProportions.addColumn("Proportion","%.3f");
for ( Map.Entry<String,ConcordanceMetrics.GenotypeConcordanceTable> entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) {
ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue();
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String rowKey = String.format("%s_%s_%s",entry.getKey(),evalType.toString(),compType.toString());
concordanceCounts.set(rowKey,"Sample",entry.getKey());
concordanceCounts.set(rowKey,"Eval_Genotype",evalType.toString());
concordanceCounts.set(rowKey,"Comp_Genotype",evalType.toString());
int count = table.get(evalType, compType);
concordanceCounts.set(rowKey,"Count",count);
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) {
concordanceEvalProportions.set(rowKey,"Sample",entry.getKey());
concordanceEvalProportions.set(rowKey,"Eval_Genotype",evalType.toString());
concordanceEvalProportions.set(rowKey,"Comp_Genotype",evalType.toString());
concordanceEvalProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnEvalGenotypes(evalType)));
}
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) {
concordanceCompProportions.set(rowKey,"Sample",entry.getKey());
concordanceCompProportions.set(rowKey,"Eval_Genotype",evalType.toString());
concordanceCompProportions.set(rowKey,"Comp_Genotype",evalType.toString());
concordanceCompProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnCompGenotypes(compType)));
}
}
}
String mismatchKey = String.format("%s_%s",entry.getKey(),"Mismatching");
concordanceCounts.set(mismatchKey,"Sample",entry.getKey());
concordanceCounts.set(mismatchKey,"Eval_Genotype","Mismatching_Alleles");
concordanceCounts.set(mismatchKey,"Comp_Genotype","Mismatching_Alleles");
concordanceEvalProportions.set(mismatchKey,"Sample",entry.getKey());
concordanceEvalProportions.set(mismatchKey,"Eval_Genotype","Mismatching_Alleles");
concordanceEvalProportions.set(mismatchKey,"Comp_Genotype","Mismatching_Alleles");
concordanceCompProportions.set(mismatchKey,"Sample",entry.getKey());
concordanceCompProportions.set(mismatchKey,"Eval_Genotype","Mismatching_Alleles");
concordanceCompProportions.set(mismatchKey,"Comp_Genotype","Mismatching_Alleles");
concordanceEvalProportions.set(mismatchKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes()));
concordanceCompProportions.set(mismatchKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes()));
concordanceCounts.set(mismatchKey,"Count",table.getnMismatchingAlt());
}
String sampleKey = "ALL";
ConcordanceMetrics.GenotypeConcordanceTable table = metrics.getOverallGenotypeConcordance();
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String rowKey = String.format("%s_%s_%s",sampleKey,evalType.toString(),compType.toString());
concordanceCounts.set(rowKey,"Sample",sampleKey);
concordanceCounts.set(rowKey,"Eval_Genotype",evalType.toString());
concordanceCounts.set(rowKey,"Comp_Genotype",evalType.toString());
int count = table.get(evalType, compType);
concordanceCounts.set(rowKey,"Count",count);
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR) {
concordanceEvalProportions.set(rowKey,"Sample",sampleKey);
concordanceEvalProportions.set(rowKey,"Eval_Genotype",evalType.toString());
concordanceEvalProportions.set(rowKey,"Comp_Genotype",evalType.toString());
concordanceEvalProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnEvalGenotypes(evalType)));
}
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF ) {
concordanceCompProportions.set(rowKey,"Sample",sampleKey);
concordanceCompProportions.set(rowKey,"Eval_Genotype",evalType.toString());
concordanceCompProportions.set(rowKey,"Comp_Genotype",evalType.toString());
concordanceCompProportions.set(rowKey,"Proportion",repairNaN(( (double) count)/table.getnCompGenotypes(compType)));
}
}
}
String rowKey = String.format("%s_%s",sampleKey,"Mismatching");
concordanceCounts.set(rowKey,"Sample",sampleKey);
concordanceCounts.set(rowKey,"Eval_Genotype","Mismatching_Alleles");
concordanceCounts.set(rowKey,"Comp_Genotype","Mismatching_Alleles");
concordanceEvalProportions.set(rowKey,"Sample",sampleKey);
concordanceEvalProportions.set(rowKey,"Eval_Genotype","Mismatching_Alleles");
concordanceEvalProportions.set(rowKey,"Comp_Genotype","Mismatching_Alleles");
concordanceCompProportions.set(rowKey,"Sample",sampleKey);
concordanceCompProportions.set(rowKey,"Eval_Genotype","Mismatching_Alleles");
concordanceCompProportions.set(rowKey,"Comp_Genotype","Mismatching_Alleles");
concordanceEvalProportions.set(rowKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes()));
concordanceCompProportions.set(rowKey,"Proportion", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes()));
concordanceCounts.set(rowKey,"Count",table.getnMismatchingAlt());
for ( Map.Entry<String,Double> nrsEntry : metrics.getPerSampleNRS().entrySet() ) {
concordanceSummary.set(nrsEntry.getKey(),"Sample",nrsEntry.getKey());
concordanceSummary.set(nrsEntry.getKey(),"Non-Reference_Sensitivity",nrsEntry.getValue());
}
for ( Map.Entry<String,Double> nrdEntry : metrics.getPerSampleNRD().entrySet() ) {
concordanceSummary.set(nrdEntry.getKey(),"Non-Reference_Discrepancy",nrdEntry.getValue());
}
concordanceSummary.set("ALL_NRS_NRD","Sample","ALL");
concordanceSummary.set("ALL_NRS_NRD","Non-Reference_Sensitivity",metrics.getOverallNRS());
concordanceSummary.set("ALL_NRS_NRD","Non-Reference_Discrepancy",metrics.getOverallNRD());
for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) {
siteConcordance.addColumn(type.toString(),"%d");
}
for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) {
siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type));
}
} else {
concordanceCompProportions.addColumn("Sample","%s");
concordanceCounts.addColumn("Sample","%s");
concordanceEvalProportions.addColumn("Sample","%s");
concordanceSummary.addColumn("Sample","%s");
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String colKey = String.format("%s_%s", evalType.toString(), compType.toString());
concordanceCounts.addColumn(colKey,"%d");
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR)
concordanceEvalProportions.addColumn(colKey,"%.3f");
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF )
concordanceCompProportions.addColumn(colKey,"%.3f");
}
}
concordanceEvalProportions.addColumn("Mismatching_Alleles","%.3f");
concordanceCompProportions.addColumn("Mismatching_Alleles","%.3f");
concordanceCounts.addColumn("Mismatching_Alleles","%d");
concordanceSummary.addColumn("Non-Reference Sensitivity","%.3f");
concordanceSummary.addColumn("Non-Reference Discrepancy","%.3f");
for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) {
siteConcordance.addColumn(type.toString(),"%d");
}
for ( Map.Entry<String,ConcordanceMetrics.GenotypeConcordanceTable> entry : metrics.getPerSampleGenotypeConcordance().entrySet() ) {
ConcordanceMetrics.GenotypeConcordanceTable table = entry.getValue();
concordanceEvalProportions.set(entry.getKey(),"Sample",entry.getKey());
concordanceCompProportions.set(entry.getKey(),"Sample",entry.getKey());
concordanceCounts.set(entry.getKey(),"Sample",entry.getKey());
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String colKey = String.format("%s_%s",evalType.toString(),compType.toString());
int count = table.get(evalType, compType);
concordanceCounts.set(entry.getKey(),colKey,count);
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR)
concordanceEvalProportions.set(entry.getKey(),colKey,repairNaN(( (double) count)/table.getnEvalGenotypes(evalType)));
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF )
concordanceCompProportions.set(entry.getKey(),colKey,repairNaN(( (double) count)/table.getnCompGenotypes(compType)));
}
}
concordanceEvalProportions.set(entry.getKey(),"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes()));
concordanceCompProportions.set(entry.getKey(),"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes()));
concordanceCounts.set(entry.getKey(),"Mismatching_Alleles",table.getnMismatchingAlt());
}
String rowKey = "ALL";
concordanceCompProportions.set(rowKey,"Sample",rowKey);
concordanceEvalProportions.set(rowKey,"Sample",rowKey);
concordanceCounts.set(rowKey,"Sample",rowKey);
ConcordanceMetrics.GenotypeConcordanceTable table = metrics.getOverallGenotypeConcordance();
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String colKey = String.format("%s_%s",evalType.toString(),compType.toString());
int count = table.get(evalType, compType);
concordanceCounts.set(entry.getKey(),colKey,count);
int count = table.get(evalType,compType);
concordanceCounts.set(rowKey,colKey,count);
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR)
concordanceEvalProportions.set(entry.getKey(),colKey,( (double) count)/table.getnEvalGenotypes(evalType));
concordanceEvalProportions.set(rowKey,colKey,repairNaN(( (double) count)/table.getnEvalGenotypes(evalType)));
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF )
concordanceCompProportions.set(entry.getKey(),colKey,( (double) count)/table.getnCompGenotypes(compType));
concordanceCompProportions.set(rowKey,colKey,repairNaN(( (double) count)/table.getnCompGenotypes(compType)));
}
}
concordanceEvalProportions.set(entry.getKey(),"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes());
concordanceCompProportions.set(entry.getKey(),"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes());
concordanceCounts.set(entry.getKey(),"Mismatching_Alleles",table.getnMismatchingAlt());
}
concordanceEvalProportions.set(rowKey,"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes()));
concordanceCompProportions.set(rowKey,"Mismatching_Alleles", repairNaN(( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes()));
concordanceCounts.set(rowKey,"Mismatching_Alleles",table.getnMismatchingAlt());
String rowKey = "ALL";
concordanceCompProportions.set(rowKey,"Sample",rowKey);
concordanceEvalProportions.set(rowKey,"Sample",rowKey);
concordanceCounts.set(rowKey,"Sample",rowKey);
ConcordanceMetrics.GenotypeConcordanceTable table = metrics.getOverallGenotypeConcordance();
for ( GenotypeType evalType : GenotypeType.values() ) {
for ( GenotypeType compType : GenotypeType.values() ) {
String colKey = String.format("%s_%s",evalType.toString(),compType.toString());
int count = table.get(evalType,compType);
concordanceCounts.set(rowKey,colKey,count);
if ( evalType == GenotypeType.HET || evalType == GenotypeType.HOM_REF || evalType == GenotypeType.HOM_VAR)
concordanceEvalProportions.set(rowKey,colKey,( (double) count)/table.getnEvalGenotypes(evalType));
if ( compType == GenotypeType.HET || compType == GenotypeType.HOM_VAR || compType == GenotypeType.HOM_REF )
concordanceCompProportions.set(rowKey,colKey,( (double) count)/table.getnCompGenotypes(compType));
for ( Map.Entry<String,Double> nrsEntry : metrics.getPerSampleNRS().entrySet() ) {
concordanceSummary.set(nrsEntry.getKey(),"Sample",nrsEntry.getKey());
concordanceSummary.set(nrsEntry.getKey(),"Non-Reference Sensitivity",nrsEntry.getValue());
}
}
concordanceEvalProportions.set(rowKey,"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledEvalGenotypes());
concordanceCompProportions.set(rowKey,"Mismatching_Alleles", ( (double) table.getnMismatchingAlt() )/table.getnCalledCompGenotypes());
concordanceCounts.set(rowKey,"Mismatching_Alleles",table.getnMismatchingAlt());
for ( Map.Entry<String,Double> nrdEntry : metrics.getPerSampleNRD().entrySet() ) {
concordanceSummary.set(nrdEntry.getKey(),"Non-Reference Discrepancy",nrdEntry.getValue());
}
concordanceSummary.set("ALL","Sample","ALL");
concordanceSummary.set("ALL","Non-Reference Sensitivity",metrics.getOverallNRS());
concordanceSummary.set("ALL","Non-Reference Discrepancy",metrics.getOverallNRD());
for ( Map.Entry<String,Double> nrsEntry : metrics.getPerSampleNRS().entrySet() ) {
concordanceSummary.set(nrsEntry.getKey(),"Sample",nrsEntry.getKey());
concordanceSummary.set(nrsEntry.getKey(),"Non-Reference Sensitivity",nrsEntry.getValue());
}
for ( Map.Entry<String,Double> nrdEntry : metrics.getPerSampleNRD().entrySet() ) {
concordanceSummary.set(nrdEntry.getKey(),"Non-Reference Discrepancy",nrdEntry.getValue());
}
concordanceSummary.set("ALL","Sample","ALL");
concordanceSummary.set("ALL","Non-Reference Sensitivity",metrics.getOverallNRS());
concordanceSummary.set("ALL","Non-Reference Discrepancy",metrics.getOverallNRD());
for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) {
siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type));
for (ConcordanceMetrics.SiteConcordanceType type : ConcordanceMetrics.SiteConcordanceType.values() ) {
siteConcordance.set("Comparison",type.toString(),metrics.getOverallSiteConcordance().get(type));
}
}
report.addTable(concordanceCompProportions);
@ -226,7 +446,7 @@ public class GenotypeConcordance extends RodWalker<Pair<VariantContext,VariantCo
report.print(out);
}
public VariantContext createEmptyContext(ReferenceContext ref, VariantContext other, List<String> samples) {
public VariantContext createEmptyContext(VariantContext other, List<String> samples) {
VariantContextBuilder builder = new VariantContextBuilder();
// set the alleles to be the same
builder.alleles(other.getAlleles());
@ -239,13 +459,29 @@ public class GenotypeConcordance extends RodWalker<Pair<VariantContext,VariantCo
return builder.make();
}
public VariantContext filterGenotypes(VariantContext context, boolean ignoreSiteFilter) {
// placeholder method for genotype-level filtering. However if the site itself is filtered,
// and such filters are not ignored, the genotype-level data should be altered to reflect this
public VariantContext filterGenotypes(VariantContext context, boolean ignoreSiteFilter, List<VariantContextUtils.JexlVCMatchExp> exps) {
if ( ! context.isFiltered() || ignoreSiteFilter ) {
// todo -- add genotype-level jexl filtering here
return context;
List<Genotype> filteredGenotypes = new ArrayList<Genotype>(context.getNSamples());
for ( Genotype g : context.getGenotypes() ) {
Map<VariantContextUtils.JexlVCMatchExp, Boolean> matchMap = VariantContextUtils.match(context, g, exps);
boolean filtered = false;
for ( Boolean b : matchMap.values() ) {
if ( b ) {
filtered = true;
break;
}
}
if ( filtered ) {
filteredGenotypes.add(GenotypeBuilder.create(g.getSampleName(),Arrays.asList(Allele.NO_CALL,Allele.NO_CALL),g.getExtendedAttributes()));
} else {
filteredGenotypes.add(g);
}
}
VariantContextBuilder builder = new VariantContextBuilder(context);
builder.genotypes(filteredGenotypes);
return builder.make();
}
VariantContextBuilder builder = new VariantContextBuilder();
builder.alleles(Arrays.asList(context.getReference()));
builder.loc(context.getChr(),context.getStart(),context.getEnd());
@ -256,4 +492,4 @@ public class GenotypeConcordance extends RodWalker<Pair<VariantContext,VariantCo
builder.genotypes(newGeno);
return builder.make();
}
}
}

View File

@ -57,6 +57,8 @@ import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.RodWalker;
import org.broadinstitute.sting.gatk.walkers.TreeReducible;
import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.interval.IntervalMergingRule;
import org.broadinstitute.sting.utils.interval.IntervalSetRule;
import org.broadinstitute.sting.utils.variant.GATKVCFUtils;
import org.broadinstitute.variant.vcf.*;
import org.broadinstitute.sting.utils.help.DocumentedGATKFeature;
@ -180,18 +182,47 @@ public class SelectHeaders extends RodWalker<Integer, Integer> implements TreeRe
headerLines = new LinkedHashSet<VCFHeaderLine>(getSelectedHeaders(headerLines));
// Optionally add in the intervals.
if (includeIntervals && getToolkit().getArguments().intervals != null) {
for (IntervalBinding<Feature> intervalBinding : getToolkit().getArguments().intervals) {
String source = intervalBinding.getSource();
if (source == null)
continue;
File file = new File(source);
if (file.exists()) {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, FilenameUtils.getBaseName(file.getName())));
} else {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, source));
if (includeIntervals) {
IntervalArgumentCollection intervalArguments = getToolkit().getArguments().intervalArguments;
if (intervalArguments.intervals != null) {
for (IntervalBinding<Feature> intervalBinding : intervalArguments.intervals) {
String source = intervalBinding.getSource();
if (source == null)
continue;
File file = new File(source);
if (file.exists()) {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, FilenameUtils.getBaseName(file.getName())));
} else {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVALS_KEY, source));
}
}
}
if (intervalArguments.excludeIntervals != null) {
for (IntervalBinding<Feature> intervalBinding : intervalArguments.excludeIntervals) {
String source = intervalBinding.getSource();
if (source == null)
continue;
File file = new File(source);
if (file.exists()) {
headerLines.add(new VCFHeaderLine(VCFHeader.EXCLUDE_INTERVALS_KEY, FilenameUtils.getBaseName(file.getName())));
} else {
headerLines.add(new VCFHeaderLine(VCFHeader.EXCLUDE_INTERVALS_KEY, source));
}
}
}
if (intervalArguments.intervalMerging != IntervalMergingRule.ALL) {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVAL_MERGING_KEY, String.valueOf(intervalArguments.intervalMerging)));
}
if (intervalArguments.intervalSetRule != IntervalSetRule.UNION) {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVAL_SET_RULE_KEY, String.valueOf(intervalArguments.intervalSetRule)));
}
if (intervalArguments.intervalPadding != 0) {
headerLines.add(new VCFHeaderLine(VCFHeader.INTERVAL_PADDING_KEY, String.valueOf(intervalArguments.intervalPadding)));
}
}
TreeSet<String> vcfSamples = new TreeSet<String>(SampleUtils.getSampleList(vcfRods, VariantContextUtils.GenotypeMergeType.REQUIRE_UNIQUE));

View File

@ -51,6 +51,7 @@ import net.sf.samtools.SAMReadGroupRecord;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState;
import org.broadinstitute.variant.utils.BaseUtils;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
@ -214,8 +215,7 @@ public class ArtificialReadPileupTestProvider {
read.setReadNegativeStrandFlag(false);
read.setReadGroup(sampleRG(sample));
pileupElements.add(new PileupElement(read,readOffset,false,isBeforeDeletion, false, isBeforeInsertion,false,false,altBases,Math.abs(eventLength)));
pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(read, readOffset));
}
return pileupElements;

View File

@ -108,7 +108,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
public void testMultipleSNPAlleles() {
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-T UnifiedGenotyper -R " + b37KGReference + " --no_cmdline_in_header -glm BOTH --dbsnp " + b37dbSNP129 + " -I " + privateTestDir + "multiallelic.snps.bam -o %s -L " + privateTestDir + "multiallelic.snps.intervals", 1,
Arrays.asList("b41b95aaa2c453c9b75b3b29a9c2718e"));
Arrays.asList("35479a79e1ce7c15493bd77e58cadcaa"));
executeTest("test Multiple SNP alleles", spec);
}
@ -124,7 +124,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
public void testReverseTrim() {
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-T UnifiedGenotyper -R " + b37KGReference + " --no_cmdline_in_header -glm INDEL -I " + validationDataLocation + "CEUTrio.HiSeq.b37.chr20.10_11mb.bam -o %s -L 20:10289124 -L 20:10090289", 1,
Arrays.asList("44e9f6cf11b4efecb454cd3de8de9877"));
Arrays.asList("1e61de694b51d7c0f26da5179ee6bb0c"));
executeTest("test reverse trim", spec);
}
@ -238,12 +238,12 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
@Test
public void testOutputParameterAllConfident() {
testOutputParameters("--output_mode EMIT_ALL_CONFIDENT_SITES", "9dbc9389db39cf9697e93e0bf529314f");
testOutputParameters("--output_mode EMIT_ALL_CONFIDENT_SITES", "5649f72de04e1391e0f2bb86843d3d72");
}
@Test
public void testOutputParameterAllSites() {
testOutputParameters("--output_mode EMIT_ALL_SITES", "8b26088a035e579c4afd3b46737291e4");
testOutputParameters("--output_mode EMIT_ALL_SITES", "cb151bb9e90680b12714d481091ed209");
}
private void testOutputParameters(final String args, final String md5) {
@ -363,7 +363,7 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
" -o %s" +
" -L 1:10,000,000-10,500,000",
1,
Arrays.asList("39c7a813fd6ee82d3604f2a868b35b2a"));
Arrays.asList("8231ae37b52b927db9fc1e5c221b0ba0"));
executeTest(String.format("test indel calling, multiple technologies"), spec);
}
@ -391,13 +391,13 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
public void testMultiSampleIndels1() {
WalkerTest.WalkerTestSpec spec1 = new WalkerTest.WalkerTestSpec(
baseCommandIndels + " -I " + validationDataLocation + "low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10450700-10551000", 1,
Arrays.asList("5667a699a3a13474f2d1cd2d6b01cd5b"));
Arrays.asList("a47810de2f6ef8087f4644064a0814bc"));
List<File> result = executeTest("test MultiSample Pilot1 CEU indels", spec1).getFirst();
WalkerTest.WalkerTestSpec spec2 = new WalkerTest.WalkerTestSpec(
baseCommandIndels + " --genotyping_mode GENOTYPE_GIVEN_ALLELES -alleles " + result.get(0).getAbsolutePath() + " -I " + validationDataLocation +
"low_coverage_CEU.chr1.10k-11k.bam -o %s -L 1:10450700-10551000", 1,
Arrays.asList("b6c1d5cd28ff584c5f5037afef4e883a"));
Arrays.asList("53b8d2b0fa63c5d1019855e8e0db28f0"));
executeTest("test MultiSample Pilot1 CEU indels using GENOTYPE_GIVEN_ALLELES", spec2);
}
@ -497,18 +497,18 @@ public class UnifiedGenotyperIntegrationTest extends WalkerTest {
public void testReducedBam() {
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-T UnifiedGenotyper -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "bamExample.ReducedRead.ADAnnotation.bam -o %s -L 1:67,225,396-67,288,518", 1,
Arrays.asList("092e42a712afb660ec79ff11c55933e2"));
Arrays.asList("8b9a9fc2e7150acbe2dac91b4620f304"));
executeTest("test calling on a ReducedRead BAM", spec);
}
@Test
public void testReducedBamSNPs() {
testReducedCalling("SNP", "c0de74ab8f4f14eb3a2c5d55c200ac5f");
testReducedCalling("SNP", "b5991dddbfb59366614ff8819062649f");
}
@Test
public void testReducedBamINDELs() {
testReducedCalling("INDEL", "1c9aaf65ffaa12bb766855265a1c3f8e");
testReducedCalling("INDEL", "a85c110fcac9574a54c7daccb1e2d5ae");
}

View File

@ -50,6 +50,7 @@ import org.broadinstitute.sting.WalkerTest;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
public class HaplotypeCallerIntegrationTest extends WalkerTest {
final static String REF = b37KGReference;
@ -67,7 +68,7 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest {
@Test
public void testHaplotypeCallerMultiSample() {
HCTest(CEUTRIO_BAM, "", "35c8425b44429ac7468c2cd26f8f5a42");
HCTest(CEUTRIO_BAM, "", "b8f7b741445ce6b6ea491c794ce75c17");
}
@Test
@ -75,11 +76,33 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest {
HCTest(NA12878_BAM, "", "a2c63f6e6e51a01019bdbd23125bdb15");
}
// TODO -- add more tests for GGA mode, especially with input alleles that are complex variants and/or not trimmed
@Test(enabled = false)
public void testHaplotypeCallerSingleSampleWithDbsnp() {
HCTest(NA12878_BAM, "-D " + b37dbSNP132, "");
}
@Test
public void testHaplotypeCallerMultiSampleGGA() {
HCTest(CEUTRIO_BAM, "--max_alternate_alleles 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf",
"d918d25b22a551cae5d70ea30d7feed1");
"c679ae7f04bdfda896b5c046d35e043c");
}
private void HCTestComplexGGA(String bam, String args, String md5) {
final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, bam) + " --no_cmdline_in_header -o %s -minPruning 3 -gt_mode GENOTYPE_GIVEN_ALLELES -out_mode EMIT_ALL_SITES -alleles " + validationDataLocation + "combined.phase1.chr20.raw.indels.sites.vcf";
final WalkerTestSpec spec = new WalkerTestSpec(base + " " + args, Arrays.asList(md5));
executeTest("testHaplotypeCallerComplexGGA: args=" + args, spec);
}
@Test
public void testHaplotypeCallerMultiSampleGGAComplex() {
HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:119673-119823 -L 20:121408-121538",
"8730a9ebaeecae913dca2fb5a0d4e946");
}
@Test
public void testHaplotypeCallerMultiSampleGGAMultiAllelic() {
HCTestComplexGGA(NA12878_CHR20_BAM, "-L 20:133041-133161 -L 20:300207-300337",
"d590c8d6d5e58d685401b65a23846893");
}
private void HCTestComplexVariants(String bam, String args, String md5) {
@ -115,10 +138,15 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest {
HCTestIndelQualityScores(NA12878_RECALIBRATED_BAM, "", "29f1125df5ab27cc937a144ae08ac735");
}
// That problem bam came from a user on the forum and it spotted a problem where the ReadClipper
// was modifying the GATKSamRecord and that was screwing up the traversal engine from map call to
// map call. So the test is there for consistency but not for correctness. I'm not sure we can trust
// any of the calls in that region because it is so messy. The only thing I would maybe be worried about is
// that the three calls that are missing happen to all be the left most calls in the region
@Test
public void HCTestProblematicReadsModifiedInActiveRegions() {
final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, privateTestDir + "haplotype-problem-4.bam") + " --no_cmdline_in_header -o %s -minPruning 3 -L 4:49139026-49139965";
final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("2e8e6313228b0387008437feae7f5469"));
final WalkerTestSpec spec = new WalkerTestSpec(base, Arrays.asList("31db0a2d9eb07f86e0a89f0d97169072"));
executeTest("HCTestProblematicReadsModifiedInActiveRegions: ", spec);
}
@ -129,6 +157,14 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest {
executeTest("HCTestStructuralIndels: ", spec);
}
@Test
public void HCTestDoesNotFailOnBadRefBase() {
// don't care about the output - just want to make sure it doesn't fail
final String base = String.format("-T HaplotypeCaller -R %s -I %s", REF, privateTestDir + "NA12878.readsOverBadBase.chr3.bam") + " --no_cmdline_in_header -o /dev/null -L 3:60830000-60840000 --minPruning 3 -stand_call_conf 2 -stand_emit_conf 2";
final WalkerTestSpec spec = new WalkerTestSpec(base, Collections.<String>emptyList());
executeTest("HCTestDoesNotFailOnBadRefBase: ", spec);
}
// --------------------------------------------------------------------------------------------------------------
//
// testing reduced reads
@ -142,4 +178,12 @@ public class HaplotypeCallerIntegrationTest extends WalkerTest {
Arrays.asList("8a400b0c46f41447fcc35a907e34f384"));
executeTest("HC calling on a ReducedRead BAM", spec);
}
@Test
public void testReducedBamWithReadsNotFullySpanningDeletion() {
    // Calls the HaplotypeCaller on a reduced-reads BAM at a single site where the reads
    // do not fully span a deletion; the expected output is pinned by MD5.
    final String commandLine = "-T HaplotypeCaller -R " + b37KGReference + " --no_cmdline_in_header -I " + privateTestDir + "reduced.readNotFullySpanningDeletion.bam -o %s -L 1:167871297";
    final WalkerTest.WalkerTestSpec spec =
            new WalkerTest.WalkerTestSpec(commandLine, 1, Arrays.asList("4e8121dd9dc90478f237bd6ae4d19920"));
    executeTest("test calling on a ReducedRead BAM where the reads do not fully span a deletion", spec);
}
}

View File

@ -111,8 +111,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
private Pair<VariantContext,VariantContext> getData1() {
Allele reference_A = Allele.create(BaseUtils.A,true);
Allele alt_C = Allele.create(BaseUtils.C);
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_C));
@ -160,9 +160,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
private Pair<VariantContext,VariantContext> getData2() {
Allele reference_A = Allele.create(BaseUtils.A,true);
Allele alt_C = Allele.create(BaseUtils.C);
Allele alt_T = Allele.create(BaseUtils.T);
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(reference_A,alt_T));
@ -213,10 +213,10 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
private Pair<VariantContext,VariantContext> getData3() {
Allele reference_ACT = Allele.create(new byte[]{BaseUtils.A,BaseUtils.C,BaseUtils.T},true);
Allele alt_AC = Allele.create(new byte[]{BaseUtils.A,BaseUtils.C});
Allele alt_A = Allele.create(BaseUtils.A);
Allele alt_ATT = Allele.create(new byte[]{BaseUtils.A,BaseUtils.T,BaseUtils.T});
Allele reference_ACT = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.C.base,BaseUtils.Base.T.base},true);
Allele alt_AC = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.C.base});
Allele alt_A = Allele.create(BaseUtils.Base.A.base);
Allele alt_ATT = Allele.create(new byte[]{BaseUtils.Base.A.base,BaseUtils.Base.T.base,BaseUtils.Base.T.base});
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_ACT,alt_ATT));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(alt_A,alt_A));
@ -267,9 +267,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
private Pair<VariantContext,VariantContext> getData4() {
Allele reference_A = Allele.create(BaseUtils.A,true);
Allele alt_C = Allele.create(BaseUtils.C);
Allele alt_T = Allele.create(BaseUtils.T);
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", Arrays.asList(Allele.NO_CALL,Allele.NO_CALL));
@ -316,9 +316,9 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
private Pair<VariantContext,VariantContext> getData5() {
Allele reference_A = Allele.create(BaseUtils.A,true);
Allele alt_C = Allele.create(BaseUtils.C);
Allele alt_T = Allele.create(BaseUtils.T);
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
Genotype sam_1_1_eval = GenotypeBuilder.create("test1_sample1", Arrays.asList(reference_A,reference_A));
Genotype sam_1_2_eval = GenotypeBuilder.create("test1_sample2", new ArrayList<Allele>(0));
@ -368,8 +368,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
private List<Pair<VariantContext,VariantContext>> getData6() {
Allele reference_A = Allele.create(BaseUtils.A,true);
Allele alt_C = Allele.create(BaseUtils.C);
Allele reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_C = Allele.create(BaseUtils.Base.C.base);
// site 1 -
@ -396,8 +396,8 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
Pair<VariantContext,VariantContext> testDataSite1 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
reference_A = Allele.create(BaseUtils.A,true);
Allele alt_T = Allele.create(BaseUtils.T);
reference_A = Allele.create(BaseUtils.Base.A.base,true);
Allele alt_T = Allele.create(BaseUtils.Base.T.base);
// site 2 -
// sample 1: no-call/hom-ref
@ -421,7 +421,7 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
Pair<VariantContext,VariantContext> testDataSite2 = new Pair<VariantContext, VariantContext>(eval_1_builder.make(),truth_1_builder.make());
Allele alt_G = Allele.create(BaseUtils.G);
Allele alt_G = Allele.create(BaseUtils.Base.G.base);
// site 3 -
// sample 1: alleles do not match
@ -605,10 +605,10 @@ public class ConcordanceMetricsUnitTest extends BaseTest {
public List<Pair<VariantContext,VariantContext>> getData7() {
Allele ref1 = Allele.create(BaseUtils.T,true);
Allele alt1 = Allele.create(BaseUtils.C);
Allele alt2 = Allele.create(BaseUtils.G);
Allele alt3 = Allele.create(BaseUtils.A);
Allele ref1 = Allele.create(BaseUtils.Base.T.base,true);
Allele alt1 = Allele.create(BaseUtils.Base.C.base);
Allele alt2 = Allele.create(BaseUtils.Base.G.base);
Allele alt3 = Allele.create(BaseUtils.Base.A.base);
GenomeLoc loc1 = genomeLocParser.createGenomeLoc("chr1",1,1);
VariantContextBuilder site1Eval = new VariantContextBuilder();

View File

@ -0,0 +1,118 @@
/*
* Copyright (c) 2010.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.variantutils;
import org.broadinstitute.sting.WalkerTest;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.testng.annotations.Test;
import java.util.Arrays;
/**
 * Integration tests for the GenotypeConcordance walker.
 *
 * Each test runs the walker over small eval/comp VCF pairs and pins the textual
 * report output with an MD5 regression checksum. Inherited WalkerTest fields
 * (b37KGReference, validationDataLocation) supply the reference and test-data paths.
 */
public class GenotypeConcordanceIntegrationTest extends WalkerTest {

    // MD5 of an empty file; useful for tests whose primary output is expected to be empty.
    protected static final String emptyMd5 = "d41d8cd98f00b204e9800998ecf8427e";

    /**
     * Builds the common command line: compare the --eval callset against the --comp
     * callset, writing the concordance report to the %s output placeholder.
     */
    public static String baseTestString(String eval, String comp) {
        return "-T GenotypeConcordance -R " + b37KGReference + " --eval " + validationDataLocation + eval + " --comp " + validationDataLocation + comp + " -o %s";
    }

    @Test
    public void testIndelConcordance() {
        // Concordance over indel-only subsets of two NA12878 callsets.
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("NA12878.Jan2013.haplotypeCaller.subset.indels.vcf", "NA12878.Jan2013.bestPractices.subset.indels.vcf"),
                0,
                Arrays.asList("0f29a0c6dc44066228c8cb204fd53ec0")
        );
        executeTest("test indel concordance", spec);
    }

    @Test
    public void testNonoverlapingSamples() {
        // Eval and comp share no samples; the walker must still produce a well-formed report.
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("GenotypeConcordanceNonOverlapTest_Eval.vcf", "GenotypeConcordanceNonOverlapTest_Comp.vcf"),
                0,
                Arrays.asList("fc725022d47b4b5f8a6ef87f0f1ffe89")
        );
        executeTest("test non-overlapping samples", spec);
    }

    @Test
    public void testNonoverlappingSamplesMoltenized() {
        // Same non-overlapping inputs, but with the report "moltenized" (long-format output).
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("GenotypeConcordanceNonOverlapTest_Eval.vcf", "GenotypeConcordanceNonOverlapTest_Comp.vcf") + " -moltenize",
                0,
                Arrays.asList("370141088362d0ab7054be5249c49c11")
        );
        executeTest("Test moltenized output",spec);
    }

    @Test
    public void testMultipleRecordsPerSite() {
        // Inputs containing more than one VCF record at the same site.
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("GenotypeConcordance.multipleRecordsTest1.eval.vcf","GenotypeConcordance.multipleRecordsTest1.comp.vcf"),
                0,
                Arrays.asList("352d59c4ac0cee5eb8ddbc9404b19ce9")
        );
        executeTest("test multiple records per site",spec);
    }

    @Test
    public void testGQFilteringEval() {
        // Apply a genotype-level filter expression (-gfe) to the EVAL rod only.
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfe 'GQ<30'",
                0,
                Arrays.asList("b7b495ccfa6d50a6be3e095d3f6d3c52")
        );
        executeTest("Test filtering on the EVAL rod",spec);
    }

    @Test
    public void testFloatFilteringComp() {
        // Apply a floating-point filter expression (-gfc) to the COMP rod only.
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfc 'LX<0.50'",
                0,
                Arrays.asList("6406b16cde7960b8943edf594303afd6")
        );
        executeTest("Test filtering on the COMP rod", spec);
    }

    @Test
    public void testCombinedFilters() {
        // Combine COMP (-gfc) and multiple EVAL (-gfe) filter expressions in one run.
        WalkerTestSpec spec = new WalkerTestSpec(
                baseTestString("genotypeConcordanceFilterTest.vcf","genotypeConcordanceFilterTest.vcf") + " -gfc 'LX<0.52' -gfe 'DP<5' -gfe 'GQ<37'",
                0,
                Arrays.asList("26ffd06215b6177acce0ea9f35d73d31")
        );
        executeTest("Test filtering on both rods",spec);
    }
}

View File

@ -0,0 +1,70 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.commandline;
import org.broad.tribble.Feature;
import org.broadinstitute.sting.utils.interval.IntervalMergingRule;
import org.broadinstitute.sting.utils.interval.IntervalSetRule;
import java.util.List;
/**
 * Argument collection grouping the GATK engine's interval-related command-line
 * arguments: the -L/-XL interval inputs and the rules (set rule, merging rule,
 * padding) that control how multiple interval specifications are combined.
 */
public class IntervalArgumentCollection {
    /**
     * Using this option one can instruct the GATK engine to traverse over only part of the genome. This argument can be specified multiple times.
     * One may use samtools-style intervals either explicitly (e.g. -L chr1 or -L chr1:100-200) or listed in a file (e.g. -L myFile.intervals).
     * Additionally, one may specify a rod file to traverse over the positions for which there is a record in the file (e.g. -L file.vcf).
     * To specify the completely unmapped reads in the BAM file (i.e. those without a reference contig) use -L unmapped.
     */
    @Input(fullName = "intervals", shortName = "L", doc = "One or more genomic intervals over which to operate. Can be explicitly specified on the command line or in a file (including a rod file)", required = false)
    public List<IntervalBinding<Feature>> intervals = null;

    /**
     * Using this option one can instruct the GATK engine NOT to traverse over certain parts of the genome. This argument can be specified multiple times.
     * One may use samtools-style intervals either explicitly (e.g. -XL chr1 or -XL chr1:100-200) or listed in a file (e.g. -XL myFile.intervals).
     * Additionally, one may specify a rod file to skip over the positions for which there is a record in the file (e.g. -XL file.vcf).
     */
    @Input(fullName = "excludeIntervals", shortName = "XL", doc = "One or more genomic intervals to exclude from processing. Can be explicitly specified on the command line or in a file (including a rod file)", required = false)
    public List<IntervalBinding<Feature>> excludeIntervals = null;

    /**
     * How should the intervals specified by multiple -L or -XL arguments be combined? Using this argument one can, for example, traverse over all of the positions
     * for which there is a record in a VCF but just in chromosome 20 (-L chr20 -L file.vcf -isr INTERSECTION).
     */
    @Argument(fullName = "interval_set_rule", shortName = "isr", doc = "Indicates the set merging approach the interval parser should use to combine the various -L or -XL inputs", required = false)
    public IntervalSetRule intervalSetRule = IntervalSetRule.UNION;

    /**
     * Should abutting (but not overlapping) intervals be treated as separate intervals?
     */
    @Argument(fullName = "interval_merging", shortName = "im", doc = "Indicates the interval merging rule we should use for abutting intervals", required = false)
    public IntervalMergingRule intervalMerging = IntervalMergingRule.ALL;

    /**
     * Number of basepairs added on each side of every -L interval.
     * For example, '-L chr1:100' with a padding value of 20 would turn into '-L chr1:80-120'.
     */
    @Argument(fullName = "interval_padding", shortName = "ip", doc = "Indicates how many basepairs of padding to include around each of the intervals specified with the -L/--intervals argument", required = false)
    public int intervalPadding = 0;
}

View File

@ -55,7 +55,6 @@ import org.broadinstitute.sting.gatk.samples.SampleDBBuilder;
import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.classloader.PluginManager;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.interval.IntervalUtils;
@ -361,7 +360,6 @@ public class GenomeAnalysisEngine {
* Returns a list of active, initialized read transformers
*
* @param walker the walker we need to apply read transformers too
* @return a non-null list of read transformers
*/
public void initializeReadTransformers(final Walker walker) {
final List<ReadTransformer> activeTransformers = new ArrayList<ReadTransformer>();
@ -672,41 +670,7 @@ public class GenomeAnalysisEngine {
* Setup the intervals to be processed
*/
protected void initializeIntervals() {
// return if no interval arguments at all
if ( argCollection.intervals == null && argCollection.excludeIntervals == null )
return;
// Note that the use of '-L all' is no longer supported.
// if include argument isn't given, create new set of all possible intervals
final Pair<GenomeLocSortedSet, GenomeLocSortedSet> includeExcludePair = IntervalUtils.parseIntervalBindingsPair(
this.referenceDataSource,
argCollection.intervals,
argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding,
argCollection.excludeIntervals);
final GenomeLocSortedSet includeSortedSet = includeExcludePair.getFirst();
final GenomeLocSortedSet excludeSortedSet = includeExcludePair.getSecond();
// if no exclude arguments, can return parseIntervalArguments directly
if ( excludeSortedSet == null )
intervals = includeSortedSet;
// otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets
else {
intervals = includeSortedSet.subtractRegions(excludeSortedSet);
// logging messages only printed when exclude (-XL) arguments are given
final long toPruneSize = includeSortedSet.coveredSize();
final long toExcludeSize = excludeSortedSet.coveredSize();
final long intervalSize = intervals.coveredSize();
logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize));
logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)",
toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize)));
}
logger.info(String.format("Processing %d bp from intervals", intervals.coveredSize()));
intervals = IntervalUtils.parseIntervalArguments(this.referenceDataSource, argCollection.intervalArguments);
}
/**
@ -842,6 +806,8 @@ public class GenomeAnalysisEngine {
if (argCollection.keepProgramRecords)
removeProgramRecords = false;
final boolean keepReadsInLIBS = walker instanceof ActiveRegionWalker;
return new SAMDataSource(
samReaderIDs,
threadAllocation,
@ -856,7 +822,8 @@ public class GenomeAnalysisEngine {
readTransformers,
includeReadsWithDeletionAtLoci(),
argCollection.defaultBaseQualities,
removeProgramRecords);
removeProgramRecords,
keepReadsInLIBS);
}
/**

View File

@ -61,6 +61,7 @@ public class ReadProperties {
private final ValidationExclusion exclusionList;
private final Collection<ReadFilter> supplementalFilters;
private final List<ReadTransformer> readTransformers;
private final boolean keepUniqueReadListInLIBS;
private final boolean includeReadsWithDeletionAtLoci;
private final boolean useOriginalBaseQualities;
private final byte defaultBaseQualities;
@ -74,6 +75,10 @@ public class ReadProperties {
return includeReadsWithDeletionAtLoci;
}
public boolean keepUniqueReadListInLIBS() {
return keepUniqueReadListInLIBS;
}
/**
* Gets a list of the files acting as sources of reads.
* @return A list of files storing reads data.
@ -161,6 +166,8 @@ public class ReadProperties {
* will explicitly list reads with deletion over the current reference base; otherwise, only observed
* bases will be seen in the pileups, and the deletions will be skipped silently.
* @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
* @param keepUniqueReadListInLIBS If true, we will tell LocusIteratorByState to track the unique reads it sees
* This is really useful for ActiveRegionTraversals
*/
public ReadProperties( Collection<SAMReaderID> samFiles,
SAMFileHeader header,
@ -172,7 +179,8 @@ public class ReadProperties {
Collection<ReadFilter> supplementalFilters,
List<ReadTransformer> readTransformers,
boolean includeReadsWithDeletionAtLoci,
byte defaultBaseQualities) {
byte defaultBaseQualities,
final boolean keepUniqueReadListInLIBS) {
this.readers = samFiles;
this.header = header;
this.sortOrder = sortOrder;
@ -184,5 +192,6 @@ public class ReadProperties {
this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci;
this.useOriginalBaseQualities = useOriginalBaseQualities;
this.defaultBaseQualities = defaultBaseQualities;
this.keepUniqueReadListInLIBS = keepUniqueReadListInLIBS;
}
}

View File

@ -26,11 +26,7 @@
package org.broadinstitute.sting.gatk.arguments;
import net.sf.samtools.SAMFileReader;
import org.broad.tribble.Feature;
import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.commandline.Hidden;
import org.broadinstitute.sting.commandline.Input;
import org.broadinstitute.sting.commandline.IntervalBinding;
import org.broadinstitute.sting.commandline.*;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.downsampling.DownsampleType;
import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod;
@ -38,8 +34,6 @@ import org.broadinstitute.sting.gatk.phonehome.GATKRunReport;
import org.broadinstitute.sting.gatk.samples.PedigreeValidationType;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.sting.utils.baq.BAQ;
import org.broadinstitute.sting.utils.interval.IntervalMergingRule;
import org.broadinstitute.sting.utils.interval.IntervalSetRule;
import java.io.File;
import java.util.ArrayList;
@ -100,41 +94,8 @@ public class GATKArgumentCollection {
@Argument(fullName = "read_filter", shortName = "rf", doc = "Specify filtration criteria to apply to each read individually", required = false)
public List<String> readFilters = new ArrayList<String>();
/**
* Using this option one can instruct the GATK engine to traverse over only part of the genome. This argument can be specified multiple times.
* One may use samtools-style intervals either explicitly (e.g. -L chr1 or -L chr1:100-200) or listed in a file (e.g. -L myFile.intervals).
* Additionally, one may specify a rod file to traverse over the positions for which there is a record in the file (e.g. -L file.vcf).
* To specify the completely unmapped reads in the BAM file (i.e. those without a reference contig) use -L unmapped.
*/
@Input(fullName = "intervals", shortName = "L", doc = "One or more genomic intervals over which to operate. Can be explicitly specified on the command line or in a file (including a rod file)", required = false)
public List<IntervalBinding<Feature>> intervals = null;
/**
* Using this option one can instruct the GATK engine NOT to traverse over certain parts of the genome. This argument can be specified multiple times.
* One may use samtools-style intervals either explicitly (e.g. -XL chr1 or -XL chr1:100-200) or listed in a file (e.g. -XL myFile.intervals).
* Additionally, one may specify a rod file to skip over the positions for which there is a record in the file (e.g. -XL file.vcf).
*/
@Input(fullName = "excludeIntervals", shortName = "XL", doc = "One or more genomic intervals to exclude from processing. Can be explicitly specified on the command line or in a file (including a rod file)", required = false)
public List<IntervalBinding<Feature>> excludeIntervals = null;
/**
* How should the intervals specified by multiple -L or -XL arguments be combined? Using this argument one can, for example, traverse over all of the positions
* for which there is a record in a VCF but just in chromosome 20 (-L chr20 -L file.vcf -isr INTERSECTION).
*/
@Argument(fullName = "interval_set_rule", shortName = "isr", doc = "Indicates the set merging approach the interval parser should use to combine the various -L or -XL inputs", required = false)
public IntervalSetRule intervalSetRule = IntervalSetRule.UNION;
/**
* Should abutting (but not overlapping) intervals be treated as separate intervals?
*/
@Argument(fullName = "interval_merging", shortName = "im", doc = "Indicates the interval merging rule we should use for abutting intervals", required = false)
public IntervalMergingRule intervalMerging = IntervalMergingRule.ALL;
/**
* For example, '-L chr1:100' with a padding value of 20 would turn into '-L chr1:80-120'.
*/
@Argument(fullName = "interval_padding", shortName = "ip", doc = "Indicates how many basepairs of padding to include around each of the intervals specified with the -L/--intervals argument", required = false)
public int intervalPadding = 0;
@ArgumentCollection
public IntervalArgumentCollection intervalArguments = new IntervalArgumentCollection();
@Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = false)
public File referenceFile = null;

View File

@ -29,7 +29,7 @@ import net.sf.picard.reference.IndexedFastaSequenceFile;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.datasources.reads.Shard;
import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource;
import org.broadinstitute.sting.gatk.iterators.LocusIterator;
import org.broadinstitute.sting.utils.locusiterator.LocusIterator;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;

View File

@ -28,9 +28,10 @@ package org.broadinstitute.sting.gatk.datasources.providers;
import org.broadinstitute.sting.gatk.downsampling.DownsampleType;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.iterators.LocusIterator;
import org.broadinstitute.sting.utils.locusiterator.LocusIterator;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState;
import java.util.Arrays;
import java.util.Collection;
@ -212,4 +213,10 @@ public abstract class LocusView extends LocusIterator implements View {
private boolean isContainedInShard(GenomeLoc location) {
return locus.containsP(location);
}
// TODO -- remove me
@Override
public LocusIteratorByState getLIBS() {
return loci.getLIBS();
}
}

View File

@ -77,13 +77,17 @@ public class ManagingReferenceOrderedView implements ReferenceOrderedView {
* @return A tracker containing information about this locus.
*/
public RefMetaDataTracker getReferenceOrderedDataAtLocus( GenomeLoc loc, ReferenceContext referenceContext ) {
List<RODRecordList> bindings = states.isEmpty() ? Collections.<RODRecordList>emptyList() : new ArrayList<RODRecordList>(states.size());
if ( states.isEmpty() )
return RefMetaDataTracker.EMPTY_TRACKER;
else {
List<RODRecordList> bindings = new ArrayList<RODRecordList>(states.size());
for ( ReferenceOrderedDataState state: states )
// todo -- warning, I removed the reference to the name from states
bindings.add( state.iterator.seekForward(loc) );
for ( ReferenceOrderedDataState state: states )
// todo -- warning, I removed the reference to the name from states
bindings.add( state.iterator.seekForward(loc) );
return new RefMetaDataTracker(bindings);
return new RefMetaDataTracker(bindings);
}
}
/**

View File

@ -158,6 +158,9 @@ public class SAMDataSource {
/**
* Create a new SAM data source given the supplied read metadata.
*
* For testing purposes
*
* @param samFiles list of reads files.
*/
public SAMDataSource(Collection<SAMReaderID> samFiles, ThreadAllocation threadAllocation, Integer numFileHandles, GenomeLocParser genomeLocParser) {
@ -177,6 +180,8 @@ public class SAMDataSource {
/**
* See complete constructor. Does not enable BAQ by default.
*
* For testing purposes
*/
public SAMDataSource(
Collection<SAMReaderID> samFiles,
@ -203,6 +208,7 @@ public class SAMDataSource {
Collections.<ReadTransformer>emptyList(),
includeReadsWithDeletionAtLoci,
(byte) -1,
false,
false);
}
@ -219,6 +225,7 @@ public class SAMDataSource {
* will explicitly list reads with deletion over the current reference base; otherwise, only observed
* bases will be seen in the pileups, and the deletions will be skipped silently.
* @param defaultBaseQualities if the reads have incomplete quality scores, set them all to defaultBaseQuality.
* @param keepReadsInLIBS should we keep a unique list of reads in LIBS?
*/
public SAMDataSource(
Collection<SAMReaderID> samFiles,
@ -234,7 +241,8 @@ public class SAMDataSource {
List<ReadTransformer> readTransformers,
boolean includeReadsWithDeletionAtLoci,
byte defaultBaseQualities,
boolean removeProgramRecords) {
boolean removeProgramRecords,
final boolean keepReadsInLIBS) {
this.readMetrics = new ReadMetrics();
this.genomeLocParser = genomeLocParser;
@ -306,7 +314,8 @@ public class SAMDataSource {
supplementalFilters,
readTransformers,
includeReadsWithDeletionAtLoci,
defaultBaseQualities);
defaultBaseQualities,
keepReadsInLIBS);
// cache the read group id (original) -> read group id (merged)
// and read group id (merged) -> read group id (original) mappings.

View File

@ -95,7 +95,10 @@ public abstract class Shard implements HasGenomeLocation {
*/
private final Map<SAMReaderID,SAMFileSpan> fileSpans;
/**
* Lazy-calculated span of all of the genome locs in this shard
*/
private GenomeLoc spanningLocation = null;
/**
* Statistics about which reads in this shards were used and which were filtered away.
@ -148,27 +151,34 @@ public abstract class Shard implements HasGenomeLocation {
/**
* Returns the span of the genomeLocs comprising this shard
* @param
* @return
* @return a GenomeLoc that starts as the first position in getGenomeLocs() and stops at the stop of the last
* position in getGenomeLocs()
*/
public GenomeLoc getLocation() {
if ( getGenomeLocs() == null )
return GenomeLoc.WHOLE_GENOME;
if ( spanningLocation == null ) {
if ( getGenomeLocs() == null )
spanningLocation = GenomeLoc.WHOLE_GENOME;
else if ( getGenomeLocs().size() == 0 ) {
spanningLocation = getGenomeLocs().get(0);
} else {
int start = Integer.MAX_VALUE;
int stop = Integer.MIN_VALUE;
String contig = null;
int start = Integer.MAX_VALUE;
int stop = Integer.MIN_VALUE;
String contig = null;
for ( GenomeLoc loc : getGenomeLocs() ) {
if ( GenomeLoc.isUnmapped(loc) )
// special case the unmapped region marker, just abort out
return loc;
contig = loc.getContig();
if ( loc.getStart() < start ) start = loc.getStart();
if ( loc.getStop() > stop ) stop = loc.getStop();
}
for ( GenomeLoc loc : getGenomeLocs() ) {
if ( GenomeLoc.isUnmapped(loc) )
// special case the unmapped region marker, just abort out
return loc;
contig = loc.getContig();
if ( loc.getStart() < start ) start = loc.getStart();
if ( loc.getStop() > stop ) stop = loc.getStop();
spanningLocation = parser.createGenomeLoc(contig, start, stop);
}
}
return parser.createGenomeLoc(contig, start, stop);
return spanningLocation;
}

View File

@ -27,8 +27,8 @@ package org.broadinstitute.sting.gatk.downsampling;
import net.sf.samtools.SAMRecord;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
/**
@ -41,7 +41,7 @@ import java.util.List;
*/
public class PassThroughDownsampler<T extends SAMRecord> implements ReadsDownsampler<T> {
private ArrayList<T> selectedReads;
private LinkedList<T> selectedReads;
public PassThroughDownsampler() {
clear();
@ -59,9 +59,13 @@ public class PassThroughDownsampler<T extends SAMRecord> implements ReadsDownsam
}
public boolean hasFinalizedItems() {
return selectedReads.size() > 0;
return ! selectedReads.isEmpty();
}
/**
* Note that this list is a linked list and so doesn't support fast random access
* @return
*/
public List<T> consumeFinalizedItems() {
// pass by reference rather than make a copy, for speed
List<T> downsampledItems = selectedReads;
@ -74,7 +78,7 @@ public class PassThroughDownsampler<T extends SAMRecord> implements ReadsDownsam
}
public T peekFinalized() {
return selectedReads.isEmpty() ? null : selectedReads.get(0);
return selectedReads.isEmpty() ? null : selectedReads.getFirst();
}
public T peekPending() {
@ -90,7 +94,7 @@ public class PassThroughDownsampler<T extends SAMRecord> implements ReadsDownsam
}
public void clear() {
selectedReads = new ArrayList<T>();
selectedReads = new LinkedList<T>();
}
public void reset() {

View File

@ -29,9 +29,7 @@ import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.*;
/**
* Reservoir Downsampler: Selects n reads out of a stream whose size is not known in advance, with
@ -42,10 +40,25 @@ import java.util.List;
* @author David Roazen
*/
public class ReservoirDownsampler<T extends SAMRecord> implements ReadsDownsampler<T> {
private final int targetSampleSize;
private ArrayList<T> reservoir;
/**
* if true, this downsampler will be optimized for the case
* where most of the time we won't fill up anything like the
* targetSampleSize elements. If this is false, we will allocate
* internal buffers to targetSampleSize initially, which minimizes
* the cost of allocation if we often use targetSampleSize or more
* elements.
*/
private final boolean expectFewOverflows;
private int targetSampleSize;
/**
* At times this can be a linked list or an array list, depending on how we're accessing the
* data and whether or not we're expecting few overflows
*/
private List<T> reservoir;
private boolean isLinkedList;
private int totalReadsSeen;
@ -56,17 +69,35 @@ public class ReservoirDownsampler<T extends SAMRecord> implements ReadsDownsampl
*
* @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained
* after downsampling will be min(totalReads, targetSampleSize)
* @param expectFewOverflows if true, this downsampler will be optimized for the case
* where most of the time we won't fill up anything like the
* targetSampleSize elements. If this is false, we will allocate
* internal buffers to targetSampleSize initially, which minimizes
* the cost of allocation if we often use targetSampleSize or more
* elements.
*/
public ReservoirDownsampler ( int targetSampleSize ) {
public ReservoirDownsampler ( final int targetSampleSize, final boolean expectFewOverflows) {
if ( targetSampleSize <= 0 ) {
throw new ReviewedStingException("Cannot do reservoir downsampling with a sample size <= 0");
}
this.targetSampleSize = targetSampleSize;
this.expectFewOverflows = expectFewOverflows;
clear();
reset();
}
/**
* Construct a ReservoirDownsampler
*
* @param targetSampleSize Size of the reservoir used by this downsampler. Number of items retained
* after downsampling will be min(totalReads, targetSampleSize)
*/
public ReservoirDownsampler ( int targetSampleSize ) {
this(targetSampleSize, false);
}
public void submit ( T newRead ) {
totalReadsSeen++;
@ -74,7 +105,12 @@ public class ReservoirDownsampler<T extends SAMRecord> implements ReadsDownsampl
reservoir.add(newRead);
}
else {
int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen);
if ( isLinkedList ) {
reservoir = new ArrayList<T>(reservoir);
isLinkedList = false;
}
final int randomSlot = GenomeAnalysisEngine.getRandomGenerator().nextInt(totalReadsSeen);
if ( randomSlot < targetSampleSize ) {
reservoir.set(randomSlot, newRead);
}
@ -93,10 +129,15 @@ public class ReservoirDownsampler<T extends SAMRecord> implements ReadsDownsampl
}
public List<T> consumeFinalizedItems() {
// pass by reference rather than make a copy, for speed
List<T> downsampledItems = reservoir;
clear();
return downsampledItems;
if ( reservoir.isEmpty() ) {
// if there's nothing here, don't bother allocating a new list at all
return Collections.emptyList();
} else {
// pass by reference rather than make a copy, for speed
List<T> downsampledItems = reservoir;
clear();
return downsampledItems;
}
}
public boolean hasPendingItems() {
@ -119,9 +160,18 @@ public class ReservoirDownsampler<T extends SAMRecord> implements ReadsDownsampl
// NO-OP
}
/**
* Clear the data structures used to hold information
*/
public void clear() {
reservoir = new ArrayList<T>(targetSampleSize);
totalReadsSeen = 0; // an internal stat used by the downsampling process, so not cleared by reset() below
// if we aren't expecting many overflows, allocate a linked list not an arraylist
reservoir = expectFewOverflows ? new LinkedList<T>() : new ArrayList<T>(targetSampleSize);
// it's a linked list if we allocate one
isLinkedList = expectFewOverflows;
// an internal stat used by the downsampling process, so not cleared by reset() below
totalReadsSeen = 0;
}
public void reset() {

View File

@ -114,7 +114,7 @@ public class LinearMicroScheduler extends MicroScheduler {
}
// Special function call to empty out the work queue. Ugly for now but will be cleaned up when we eventually push this functionality more into the engine
if( traversalEngine instanceof TraverseActiveRegions ) {
if( traversalEngine instanceof TraverseActiveRegions) {
final Object result = ((TraverseActiveRegions) traversalEngine).endTraversal(walker, accumulator.getReduceInit());
accumulator.accumulate(null, result); // Assumes only used with StandardAccumulator
}

View File

@ -213,7 +213,7 @@ public abstract class MicroScheduler implements MicroSchedulerMBean {
// Now that we have a progress meter, go through and initialize the traversal engines
for ( final TraversalEngine traversalEngine : allCreatedTraversalEngines )
traversalEngine.initialize(engine, progressMeter);
traversalEngine.initialize(engine, walker, progressMeter);
// JMX does not allow multiple instances with the same ObjectName to be registered with the same platform MXBean.
// To get around this limitation and since we have no job identifier at this point, register a simple counter that

View File

@ -29,13 +29,14 @@ import net.sf.picard.util.PeekableIterator;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.datasources.reads.Shard;
import org.broadinstitute.sting.gatk.iterators.LegacyLocusIteratorByState;
import org.broadinstitute.sting.gatk.iterators.LocusIterator;
import org.broadinstitute.sting.gatk.iterators.LocusIteratorByState;
import org.broadinstitute.sting.gatk.iterators.GATKSAMIterator;
import org.broadinstitute.sting.gatk.iterators.StingSAMIterator;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.locusiterator.LocusIterator;
import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.Collection;
import java.util.Iterator;
@ -71,7 +72,7 @@ public class WindowMaker implements Iterable<WindowMaker.WindowMakerIterator>, I
/**
* Hold the read iterator so that it can be closed later.
*/
private final StingSAMIterator readIterator;
private final GATKSAMIterator readIterator;
/**
* The data source for reads. Will probably come directly from the BAM file.
@ -104,22 +105,23 @@ public class WindowMaker implements Iterable<WindowMaker.WindowMakerIterator>, I
* @param sampleNames The complete set of sample names in the reads in shard
*/
private final LocusIteratorByState libs;
public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, StingSAMIterator iterator, List<GenomeLoc> intervals, Collection<String> sampleNames) {
this.sourceInfo = shard.getReadProperties();
this.readIterator = iterator;
this.readIterator = new GATKSAMIterator(iterator);
// Use the legacy version of LocusIteratorByState if legacy downsampling was requested:
this.sourceIterator = sourceInfo.getDownsamplingMethod().useLegacyDownsampler ?
new PeekableIterator<AlignmentContext>(new LegacyLocusIteratorByState(iterator,sourceInfo,genomeLocParser,sampleNames))
:
new PeekableIterator<AlignmentContext>(new LocusIteratorByState(iterator,sourceInfo,genomeLocParser,sampleNames));
if ( sourceInfo.getDownsamplingMethod().useLegacyDownsampler )
throw new IllegalArgumentException("legacy downsampler no longer supported in the window maker");
this.libs = new LocusIteratorByState(readIterator,sourceInfo,genomeLocParser,sampleNames);
this.sourceIterator = new PeekableIterator<AlignmentContext>(libs);
this.intervalIterator = intervals.size()>0 ? new PeekableIterator<GenomeLoc>(intervals.iterator()) : null;
}
public WindowMaker(Shard shard, GenomeLocParser genomeLocParser, StingSAMIterator iterator, List<GenomeLoc> intervals ) {
this(shard, genomeLocParser, iterator, intervals, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups());
this(shard, genomeLocParser, iterator, intervals, LocusIteratorByState.sampleListForSAMWithoutReadGroups());
}
public Iterator<WindowMakerIterator> iterator() {
@ -209,5 +211,10 @@ public class WindowMaker implements Iterable<WindowMaker.WindowMakerIterator>, I
throw new ReviewedStingException("BUG: filtering locus does not contain, is not before, and is not past the given alignment context");
}
}
@Override
public LocusIteratorByState getLIBS() {
return libs;
}
}
}

View File

@ -0,0 +1,57 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.samtools.SAMRecord;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.Iterator;
/**
* Temporary hack to convert SAMRecords to GATKSAMRecords
*
* User: depristo
* Date: 1/11/13
* Time: 1:19 PM
*/
public class GATKSAMIterator implements CloseableIterator<GATKSAMRecord>, Iterable<GATKSAMRecord> {
    // Underlying iterator; every call below simply delegates to it.
    final CloseableIterator<SAMRecord> it;

    /**
     * Wrap a generic SAMRecord iterator.
     *
     * @param it the underlying iterator; assumed to actually yield GATKSAMRecord
     *           instances — TODO confirm against all callers
     */
    public GATKSAMIterator(final CloseableIterator<SAMRecord> it) {
        this.it = it;
    }

    // Convenience overload for StingSAMIterator sources.
    public GATKSAMIterator(final StingSAMIterator it) {
        this.it = it;
    }

    @Override public boolean hasNext() { return it.hasNext(); }
    // NOTE(review): unchecked downcast — throws ClassCastException if the underlying
    // iterator yields a plain SAMRecord rather than a GATKSAMRecord.
    @Override public GATKSAMRecord next() { return (GATKSAMRecord)it.next(); }
    @Override public void remove() { it.remove(); }
    @Override public void close() { it.close(); }
    // Iterable support: returns itself, so this is a one-shot iterable.
    @Override public Iterator<GATKSAMRecord> iterator() { return this; }
}

View File

@ -1,962 +0,0 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.picard.util.PeekableIterator;
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.downsampling.DownsampleType;
import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.MathUtils;
import org.broadinstitute.sting.utils.LegacyReservoirDownsampler;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.ReadUtils;
import java.util.*;
/**
* Iterator that traverses a SAM File, accumulating information on a per-locus basis
*/
public class LegacyLocusIteratorByState extends LocusIterator {
/**
* our log, which we want to capture anything from this class
*/
private static Logger logger = Logger.getLogger(LegacyLocusIteratorByState.class);
// -----------------------------------------------------------------------------------------------------------------
//
// member fields
//
// -----------------------------------------------------------------------------------------------------------------
/**
* Used to create new GenomeLocs.
*/
private final GenomeLocParser genomeLocParser;
private final ArrayList<String> samples;
private final ReadStateManager readStates;
/**
 * Tracks the traversal state of a single read as the iterator walks the reference:
 * the current offset into the read's bases, the matching offset on the genome, and
 * the position within the read's CIGAR.
 */
static private class SAMRecordState {
    SAMRecord read;
    int readOffset = -1;     // how far are we offset from the start of the read bases?
    int genomeOffset = -1;   // how far are we offset from the alignment start on the genome?

    Cigar cigar = null;
    int cigarOffset = -1;            // index of the current CIGAR element within the cigar
    CigarElement curElement = null;  // the current CIGAR element itself
    int nCigarElements = 0;          // total number of CIGAR elements in this read
    int cigarElementCounter = -1;    // how far are we into a single cigarElement

    // The logical model for generating extended events is as follows: the "record state" implements the traversal
    // along the reference; thus stepForwardOnGenome() returns on every and only on actual reference bases. This
    // can be a (mis)match or a deletion (in the latter case, we still return on every individual reference base the
    // deletion spans). In the extended events mode, the record state also remembers if there was an insertion, or
    // if the deletion just started *right before* the current reference base the record state is pointing to upon the return from
    // stepForwardOnGenome(). The next call to stepForwardOnGenome() will clear that memory (as we remember only extended
    // events immediately preceding the current reference base).

    public SAMRecordState(SAMRecord read) {
        this.read = read;
        cigar = read.getCigar();
        nCigarElements = cigar.numCigarElements();
        //System.out.printf("Creating a SAMRecordState: %s%n", this);
    }

    public SAMRecord getRead() {
        return read;
    }

    /**
     * What is our current offset in the read's bases that aligns us with the reference genome?
     *
     * @return zero-based offset into the read's bases (-1 before the first step)
     */
    public int getReadOffset() {
        return readOffset;
    }

    /**
     * What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
     *
     * @return zero-based offset from the alignment start on the genome (-1 before the first step)
     */
    public int getGenomeOffset() {
        return genomeOffset;
    }

    // Reference coordinate of the current position: alignment start plus genome offset.
    public int getGenomePosition() {
        return read.getAlignmentStart() + getGenomeOffset();
    }

    public GenomeLoc getLocation(GenomeLocParser genomeLocParser) {
        return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
    }

    public CigarOperator getCurrentCigarOperator() {
        return curElement.getOperator();
    }

    public String toString() {
        return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement);
    }

    // The next CIGAR element when we're at the end of the current one, else the current element.
    public CigarElement peekForwardOnGenome() {
        return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement );
    }

    // The previous CIGAR element when we're at the start of the current one, else the current element.
    public CigarElement peekBackwardOnGenome() {
        return ( cigarElementCounter - 1 == 0 && cigarOffset - 1 > 0 ? cigar.getCigarElement(cigarOffset - 1) : curElement );
    }

    /**
     * Advance this read's state along the reference.
     *
     * @return the CIGAR operator covering the new position, or null once we have
     *         stepped past the end of the read
     * @throws UserException.MalformedBAM if the read starts or ends with a deletion
     */
    public CigarOperator stepForwardOnGenome() {
        // we enter this method with readOffset = index of the last processed base on the read
        // (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion
        if (curElement == null || ++cigarElementCounter > curElement.getLength()) {
            cigarOffset++;
            if (cigarOffset < nCigarElements) {
                curElement = cigar.getCigarElement(cigarOffset);
                cigarElementCounter = 0;
                // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
                // we reenter in order to re-check cigarElementCounter against curElement's length
                return stepForwardOnGenome();
            } else {
                if (curElement != null && curElement.getOperator() == CigarOperator.D)
                    throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");

                // Reads that contain indels model the genomeOffset as the following base in the reference. Because
                // we fall into this else block only when indels end the read, increment genomeOffset such that the
                // current offset of this read is the next ref base after the end of the indel. This position will
                // model a point on the reference somewhere after the end of the read.
                genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:
                // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
                return null;
            }
        }

        boolean done = false;
        switch (curElement.getOperator()) {
            case H: // ignore hard clips
            case P: // ignore pads
                cigarElementCounter = curElement.getLength();
                break;
            case I: // insertion w.r.t. the reference
            case S: // soft clip
                cigarElementCounter = curElement.getLength();
                readOffset += curElement.getLength();
                break;
            case D: // deletion w.r.t. the reference
                if (readOffset < 0) // we don't want reads starting with deletion, this is a malformed cigar string
                    throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");
                // should be the same as N case
                genomeOffset++;
                done = true;
                break;
            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
                genomeOffset++;
                done = true;
                break;
            case M:
            case EQ:
            case X:
                readOffset++;
                genomeOffset++;
                done = true;
                break;
            default:
                throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator());
        }

        return done ? curElement.getOperator() : stepForwardOnGenome();
    }
}
//final boolean DEBUG = false;
//final boolean DEBUG2 = false && DEBUG;
private ReadProperties readInfo;
private AlignmentContext nextAlignmentContext;
// -----------------------------------------------------------------------------------------------------------------
//
// constructors and other basic operations
//
// -----------------------------------------------------------------------------------------------------------------
/**
 * Build a legacy locus iterator over the given read stream.
 *
 * @param samIterator     source of reads, assumed coordinate-sorted — TODO confirm
 * @param readInformation read properties, including the downsampling method
 * @param genomeLocParser used to create GenomeLocs for each locus
 * @param samples         the complete set of sample names present in the reads
 * @throws IllegalArgumentException if samples is empty while reads are present
 */
public LegacyLocusIteratorByState(final Iterator<SAMRecord> samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection<String> samples) {
    this.readInfo = readInformation;
    this.genomeLocParser = genomeLocParser;
    this.samples = new ArrayList<String>(samples);
    this.readStates = new ReadStateManager(samIterator, readInformation.getDownsamplingMethod());

    // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
    // there's no read data. So we need to throw this error only when samIterator.hasNext() is true
    if (this.samples.isEmpty() && samIterator.hasNext()) {
        throw new IllegalArgumentException("samples list must not be empty");
    }
}
/**
 * For testing only. Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list
 * for the system.
 *
 * @return a single-element list containing only the null sample name
 */
public final static Collection<String> sampleListForSAMWithoutReadGroups() {
    List<String> samples = new ArrayList<String>();
    samples.add(null); // null stands in for "no read group / no sample"
    return samples;
}
// This object is its own iterator (one-shot iterable).
public Iterator<AlignmentContext> iterator() {
    return this;
}
// Intentionally a no-op: the underlying iterator is not closed here.
public void close() {
    //this.it.close();
}
// True iff another alignment context can be produced; triggers the lazy load.
public boolean hasNext() {
    lazyLoadNextAlignmentContext();
    return (nextAlignmentContext != null);
    //if ( DEBUG ) System.out.printf("hasNext() = %b%n", r);
}
// Location of the first (earliest) live read state, or null when none remain.
private GenomeLoc getLocation() {
    return readStates.isEmpty() ? null : readStates.getFirst().getLocation(genomeLocParser);
}
// -----------------------------------------------------------------------------------------------------------------
//
// next() routine and associated collection operations
//
// -----------------------------------------------------------------------------------------------------------------
/**
 * Return the next alignment context, consuming the lazily-loaded one.
 *
 * @throws NoSuchElementException when the iteration is exhausted
 */
public AlignmentContext next() {
    lazyLoadNextAlignmentContext();
    if (!hasNext())
        throw new NoSuchElementException("LocusIteratorByState: out of elements.");
    AlignmentContext currentAlignmentContext = nextAlignmentContext;
    nextAlignmentContext = null; // consumed: force the next call to load a fresh context
    return currentAlignmentContext;
}
/**
 * Creates the next alignment context from the given state. Note that this is implemented as a lazy load method.
 * nextAlignmentContext MUST BE null in order for this method to advance to the next entry.
 *
 * Builds one per-sample pileup at the current locus, then steps all read states
 * forward one reference base; loops until a non-empty pileup is produced or the
 * read stream is exhausted.
 */
private void lazyLoadNextAlignmentContext() {
    while (nextAlignmentContext == null && readStates.hasNext()) {
        readStates.collectPendingReads();

        final GenomeLoc location = getLocation();
        final Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();
        boolean hasBeenSampled = false; // true if any sample was downsampled at or past this locus
        for (final String sample : samples) {
            final Iterator<SAMRecordState> iterator = readStates.iterator(sample);
            final List<PileupElement> pile = new ArrayList<PileupElement>(readStates.size(sample));
            hasBeenSampled |= location.getStart() <= readStates.getDownsamplingExtent(sample);

            int size = 0;       // number of elements in this sample's pileup
            int nDeletions = 0; // number of deletions in this sample's pileup
            int nMQ0Reads = 0;  // number of MQ0 reads in this sample's pileup (warning: current implementation includes N bases that are MQ0)

            while (iterator.hasNext()) {
                final SAMRecordState state = iterator.next();                   // state object with the read/offset information
                final GATKSAMRecord read = (GATKSAMRecord) state.getRead();     // the actual read
                final CigarOperator op = state.getCurrentCigarOperator();       // current cigar operator
                final CigarElement nextElement = state.peekForwardOnGenome();   // next cigar element
                final CigarElement lastElement = state.peekBackwardOnGenome();  // last cigar element
                final boolean isSingleElementCigar = nextElement == lastElement;
                final CigarOperator nextOp = nextElement.getOperator();         // next cigar operator
                final CigarOperator lastOp = lastElement.getOperator();         // last cigar operator
                int readOffset = state.getReadOffset();                         // the base offset on this read

                final boolean isBeforeDeletion  = nextOp == CigarOperator.DELETION;
                final boolean isAfterDeletion   = lastOp == CigarOperator.DELETION;
                final boolean isBeforeInsertion = nextOp == CigarOperator.INSERTION;
                final boolean isAfterInsertion  = lastOp == CigarOperator.INSERTION && !isSingleElementCigar;
                final boolean isNextToSoftClip  = nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart());

                int nextElementLength = nextElement.getLength();

                if (op == CigarOperator.N) // N's are never added to any pileup
                    continue;

                if (op == CigarOperator.D) {
                    // TODO -- LIBS is totally busted for deletions so that reads with Ds right before Is in their CIGAR are broken; must fix
                    if (readInfo.includeReadsWithDeletionAtLoci()) { // only add deletions to the pileup if we are authorized to do so
                        pile.add(new PileupElement(read, readOffset, true, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, nextOp == CigarOperator.D ? nextElementLength : -1));
                        size++;
                        nDeletions++;
                        if (read.getMappingQuality() == 0)
                            nMQ0Reads++;
                    }
                }
                else {
                    if (!filterBaseInRead(read, location.getStart())) {
                        String insertedBaseString = null;
                        if (nextOp == CigarOperator.I) {
                            final int insertionOffset = isSingleElementCigar ? 0 : 1;
                            // TODO -- someone please implement a better fix for the single element insertion CIGAR!
                            if (isSingleElementCigar)
                                readOffset -= (nextElement.getLength() - 1); // LIBS has passed over the insertion bases!
                            insertedBaseString = new String(Arrays.copyOfRange(read.getReadBases(), readOffset + insertionOffset, readOffset + insertionOffset + nextElement.getLength()));
                        }

                        pile.add(new PileupElement(read, readOffset, false, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, insertedBaseString, nextElementLength));
                        size++;
                        if (read.getMappingQuality() == 0)
                            nMQ0Reads++;
                    }
                }
            }

            if (pile.size() != 0) // if this pileup added at least one base, add it to the full pileup
                fullPileup.put(sample, new ReadBackedPileupImpl(location, pile, size, nDeletions, nMQ0Reads));
        }

        updateReadStates(); // critical - must be called after we get the current state offsets and location
        if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done
            nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), hasBeenSampled);
    }
}
// fast testing of position
/**
 * Is the given read strictly past the position of the earliest live read state
 * (on a later contig, or at a later alignment start on the same contig)?
 */
private boolean readIsPastCurrentPosition(SAMRecord read) {
    if (readStates.isEmpty())
        return false;
    else {
        SAMRecordState state = readStates.getFirst();
        SAMRecord ourRead = state.getRead();
        return read.getReferenceIndex() > ourRead.getReferenceIndex() || read.getAlignmentStart() > state.getGenomePosition();
    }
}
/**
 * Generic place to put per-base filters appropriate to LocusIteratorByState
 *
 * @param rec the read under consideration
 * @param pos the reference position being piled up
 * @return true if the base at pos should be filtered out (currently: when it lies inside the adaptor)
 */
private static boolean filterBaseInRead(GATKSAMRecord rec, long pos) {
    return ReadUtils.isBaseInsideAdaptor(rec, pos);
}
// Step every live read state forward one reference base, discarding states that
// have walked off the end of their read (stepForwardOnGenome() returned null).
private void updateReadStates() {
    for (final String sample : samples) {
        Iterator<SAMRecordState> it = readStates.iterator(sample);
        while (it.hasNext()) {
            SAMRecordState state = it.next();
            CigarOperator op = state.stepForwardOnGenome();
            if (op == null) {
                // we discard the read only when we are past its end AND indel at the end of the read (if any) was
                // already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe
                // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag.
                it.remove(); // we've stepped off the end of the object
            }
        }
    }
}
// Iterator.remove() is not supported for locus traversal.
public void remove() {
    throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
}
private class ReadStateManager {
private final PeekableIterator<SAMRecord> iterator;
private final DownsamplingMethod downsamplingMethod;
private final SamplePartitioner samplePartitioner;
private final Map<String, PerSampleReadStateManager> readStatesBySample = new HashMap<String, PerSampleReadStateManager>();
private final int targetCoverage;
private int totalReadStates = 0;
/**
 * Wrap the read source and configure per-sample downsampling.
 *
 * @param downsamplingMethod the configured method; a null type is treated as NONE
 * @throws UserException.BadArgumentValue if BY_SAMPLE downsampling is requested without -dcov
 */
public ReadStateManager(Iterator<SAMRecord> source, DownsamplingMethod downsamplingMethod) {
    this.iterator = new PeekableIterator<SAMRecord>(source);
    // normalize an unset downsampling type to NONE
    this.downsamplingMethod = downsamplingMethod.type != null ? downsamplingMethod : DownsamplingMethod.NONE;
    switch (this.downsamplingMethod.type) {
        case BY_SAMPLE:
            if (downsamplingMethod.toCoverage == null)
                throw new UserException.BadArgumentValue("dcov", "Downsampling coverage (-dcov) must be specified when downsampling by sample");
            this.targetCoverage = downsamplingMethod.toCoverage;
            break;
        default:
            this.targetCoverage = Integer.MAX_VALUE; // effectively no coverage cap
    }

    Map<String, ReadSelector> readSelectors = new HashMap<String, ReadSelector>();
    for (final String sample : samples) {
        readStatesBySample.put(sample, new PerSampleReadStateManager());
        readSelectors.put(sample, downsamplingMethod.type == DownsampleType.BY_SAMPLE ? new NRandomReadSelector(null, targetCoverage) : new AllReadsSelector());
    }

    samplePartitioner = new SamplePartitioner(readSelectors);
}
/**
 * Returns a iterator over all the reads associated with the given sample. Note that remove() is implemented
 * for this iterator; if present, total read states will be decremented.
 *
 * @param sample The sample.
 * @return Iterator over the reads associated with that sample.
 */
public Iterator<SAMRecordState> iterator(final String sample) {
    return new Iterator<SAMRecordState>() {
        private Iterator<SAMRecordState> wrappedIterator = readStatesBySample.get(sample).iterator();

        public boolean hasNext() {
            return wrappedIterator.hasNext();
        }

        public SAMRecordState next() {
            return wrappedIterator.next();
        }

        public void remove() {
            wrappedIterator.remove();
            totalReadStates--; // keep the global count in sync with the per-sample removal
        }
    };
}
// True when no read states are held for any sample.
public boolean isEmpty() {
    return totalReadStates == 0;
}
/**
 * Retrieves the total number of reads in the manager across all samples.
 *
 * @return Total number of reads over all samples.
 */
public int size() {
    return totalReadStates;
}
/**
 * Retrieves the total number of reads in the manager in the given sample.
 *
 * @param sample The sample.
 * @return Total number of reads in the given sample.
 */
public int size(final String sample) {
    return readStatesBySample.get(sample).size();
}
/**
 * The extent of downsampling; basically, the furthest base out which has 'fallen
 * victim' to the downsampler.
 *
 * @param sample Sample, downsampled independently.
 * @return Integer stop of the furthest undownsampled region.
 */
public int getDownsamplingExtent(final String sample) {
    return readStatesBySample.get(sample).getDownsamplingExtent();
}
// First non-empty sample's head read state, scanning samples in list order;
// null when every sample is empty.
public SAMRecordState getFirst() {
    for (final String sample : samples) {
        PerSampleReadStateManager reads = readStatesBySample.get(sample);
        if (!reads.isEmpty())
            return reads.peek();
    }
    return null;
}
// More work remains if we hold live states or the source iterator has more reads.
public boolean hasNext() {
    return totalReadStates > 0 || iterator.hasNext();
}
/**
 * Pull all reads covering the current locus from the source iterator, partition
 * them by sample, and fold them into the per-sample state managers, applying
 * per-sample downsampling when adding them would exceed the target coverage.
 */
public void collectPendingReads() {
    if (!iterator.hasNext())
        return;

    if (readStates.size() == 0) {
        // no live states: seed with every read sharing the first contig/alignment start
        int firstContigIndex = iterator.peek().getReferenceIndex();
        int firstAlignmentStart = iterator.peek().getAlignmentStart();
        while (iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) {
            samplePartitioner.submitRead(iterator.next());
        }
    } else {
        // Fast fail in the case that the read is past the current position.
        if (readIsPastCurrentPosition(iterator.peek()))
            return;

        while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) {
            samplePartitioner.submitRead(iterator.next());
        }
    }
    samplePartitioner.complete();

    for (final String sample : samples) {
        ReadSelector aggregator = samplePartitioner.getSelectedReads(sample);

        Collection<SAMRecord> newReads = new ArrayList<SAMRecord>(aggregator.getSelectedReads());

        PerSampleReadStateManager statesBySample = readStatesBySample.get(sample);
        int numReads = statesBySample.size();
        int downsamplingExtent = aggregator.getDownsamplingExtent();

        if (numReads + newReads.size() <= targetCoverage || downsamplingMethod.type == DownsampleType.NONE) {
            // under the coverage cap (or not downsampling): take everything
            long readLimit = aggregator.getNumReadsSeen();
            addReadsToSample(statesBySample, newReads, readLimit);
            statesBySample.specifyNewDownsamplingExtent(downsamplingExtent);
        } else {
            // over the cap: prune existing states — preferring later alignment starts
            // that still hold more than one read — until the new reads fit
            int[] counts = statesBySample.getCountsPerAlignmentStart();
            int[] updatedCounts = new int[counts.length];
            System.arraycopy(counts, 0, updatedCounts, 0, counts.length);

            boolean readPruned = true;
            while (numReads + newReads.size() > targetCoverage && readPruned) {
                readPruned = false;
                for (int alignmentStart = updatedCounts.length - 1; numReads + newReads.size() > targetCoverage && alignmentStart >= 0; alignmentStart--) {
                    if (updatedCounts[alignmentStart] > 1) {
                        updatedCounts[alignmentStart]--;
                        numReads--;
                        readPruned = true;
                    }
                }
            }

            // still at the cap: drop one read from the earliest start to make room
            if (numReads == targetCoverage) {
                updatedCounts[0]--;
                numReads--;
            }

            // translate the per-start count deltas into a random selection of
            // concrete state indices to purge
            BitSet toPurge = new BitSet(readStates.size());
            int readOffset = 0;

            for (int i = 0; i < updatedCounts.length; i++) {
                int n = counts[i];
                int k = updatedCounts[i];

                for (Integer purgedElement : MathUtils.sampleIndicesWithoutReplacement(n, n - k))
                    toPurge.set(readOffset + purgedElement);

                readOffset += counts[i];
            }
            downsamplingExtent = Math.max(downsamplingExtent, statesBySample.purge(toPurge));

            addReadsToSample(statesBySample, newReads, targetCoverage - numReads);
            statesBySample.specifyNewDownsamplingExtent(downsamplingExtent);
        }
    }

    samplePartitioner.reset();
}
/**
 * Add reads with the given sample name to the given hanger entry.
 *
 * @param readStates The list of read states to add this collection of reads.
 * @param reads      Reads to add. Selected reads will be pulled from this source.
 * @param maxReads   Maximum number of reads to add.
 */
private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<SAMRecord> reads, final long maxReads) {
    if (reads.isEmpty())
        return;

    Collection<SAMRecordState> newReadStates = new LinkedList<SAMRecordState>();
    int readCount = 0;
    for (SAMRecord read : reads) {
        if (readCount < maxReads) {
            SAMRecordState state = new SAMRecordState(read);
            state.stepForwardOnGenome(); // prime the state onto its first reference base
            newReadStates.add(state);
            readCount++;
        }
    }
    readStates.addStatesAtNextAlignmentStart(newReadStates);
}
/**
 * Per-sample FIFO of read states, kept in lock-step with a queue of per-alignment-start
 * counts: readStates holds every live SAMRecordState for this sample in insertion order,
 * while readStateCounter holds one Counter per alignment start recording how many of
 * those states share that start.  Every mutation of one structure must be mirrored on
 * the other.
 */
private class PerSampleReadStateManager implements Iterable<SAMRecordState> {
    // all live read states for this sample, in the order they were added
    private final Queue<SAMRecordState> readStates = new LinkedList<SAMRecordState>();
    // one counter per alignment start; the head counter describes the head read states
    private final Deque<Counter> readStateCounter = new LinkedList<Counter>();
    // furthest aligned end among reads discarded by downsampling (0 if none discarded)
    private int downsamplingExtent = 0;

    /**
     * Appends a batch of states that all share the next alignment start, and bumps the
     * enclosing manager's totalReadStates tally accordingly.
     */
    public void addStatesAtNextAlignmentStart(Collection<SAMRecordState> states) {
        readStates.addAll(states);
        readStateCounter.add(new Counter(states.size()));
        totalReadStates += states.size();
    }

    public boolean isEmpty() {
        return readStates.isEmpty();
    }

    /** Head of the queue (left-most alignment start), without removal; null when empty. */
    public SAMRecordState peek() {
        return readStates.peek();
    }

    public int size() {
        return readStates.size();
    }

    /** Raises (never lowers) the recorded downsampling extent to the given value. */
    public void specifyNewDownsamplingExtent(int downsamplingExtent) {
        this.downsamplingExtent = Math.max(this.downsamplingExtent, downsamplingExtent);
    }

    public int getDownsamplingExtent() {
        return downsamplingExtent;
    }

    /** Snapshot of the per-alignment-start counts, ordered oldest start first. */
    public int[] getCountsPerAlignmentStart() {
        int[] counts = new int[readStateCounter.size()];
        int index = 0;
        for (Counter counter : readStateCounter)
            counts[index++] = counter.getCount();
        return counts;
    }

    /**
     * Iterator over the read states.  remove() keeps readStateCounter in sync, but
     * NOTE(review): it always decrements the HEAD counter, so removal is only correct
     * while iteration is still within the first alignment-start group — confirm that
     * callers never remove() deeper into the queue.
     */
    public Iterator<SAMRecordState> iterator() {
        return new Iterator<SAMRecordState>() {
            private Iterator<SAMRecordState> wrappedIterator = readStates.iterator();

            public boolean hasNext() {
                return wrappedIterator.hasNext();
            }

            public SAMRecordState next() {
                return wrappedIterator.next();
            }

            public void remove() {
                wrappedIterator.remove();
                Counter counter = readStateCounter.peek();
                counter.decrement();
                if (counter.getCount() == 0)
                    readStateCounter.remove();
            }
        };
    }

    /**
     * Purge the given elements from the bitset.  If an element in the bitset is true, purge
     * the corresponding read state.
     *
     * @param elements bits from the set to purge.
     * @return the extent of the final downsampled read.
     */
    public int purge(final BitSet elements) {
        int downsamplingExtent = 0;

        if (elements.isEmpty() || readStates.isEmpty()) return downsamplingExtent;

        // walk the state queue and the per-start counters in parallel so that counters
        // stay consistent as states are removed
        Iterator<SAMRecordState> readStateIterator = readStates.iterator();
        Iterator<Counter> counterIterator = readStateCounter.iterator();
        Counter currentCounter = counterIterator.next();

        int readIndex = 0;
        long alignmentStartCounter = currentCounter.getCount();  // states left to visit in the current start group
        int toPurge = elements.nextSetBit(0);                    // next index slated for removal (-1 when exhausted)
        int removedCount = 0;

        while (readStateIterator.hasNext() && toPurge >= 0) {
            SAMRecordState state = readStateIterator.next();
            // track the furthest alignment end among ALL states visited, purged or not
            downsamplingExtent = Math.max(downsamplingExtent, state.getRead().getAlignmentEnd());
            if (readIndex == toPurge) {
                readStateIterator.remove();
                currentCounter.decrement();
                if (currentCounter.getCount() == 0)
                    counterIterator.remove();   // start group exhausted; drop its counter
                removedCount++;
                toPurge = elements.nextSetBit(toPurge + 1);
            }
            readIndex++;
            alignmentStartCounter--;
            // advance to the next alignment-start group once this one is fully visited
            if (alignmentStartCounter == 0 && counterIterator.hasNext()) {
                currentCounter = counterIterator.next();
                alignmentStartCounter = currentCounter.getCount();
            }
        }
        totalReadStates -= removedCount;
        return downsamplingExtent;
    }
}
}
/**
 * A simple mutable tally used to track how many read states share an alignment start.
 * Note: assuming that, whenever we downsample, we downsample to an integer capacity.
 */
static private class Counter {
    // current tally; only ever initialized once and decremented thereafter
    private int count;

    public Counter(int initialCount) {
        count = initialCount;
    }

    public int getCount() {
        return count;
    }

    public void decrement() {
        count -= 1;
    }
}
}
/**
 * Selects reads passed to it based on a criteria decided through inheritance.
 *
 * Call protocol: submitRead()/notifyReadRejected() during gathering, complete() once
 * submission is finished, then the getters, then reset() before reuse.
 *
 * TODO: This is a temporary abstraction until we can get rid of this downsampling implementation and the mrl option.  Get rid of this.
 */
interface ReadSelector {
    /**
     * All previous selectors in the chain have allowed this read.  Submit it to this selector for consideration.
     *
     * @param read the read to evaluate.
     */
    public void submitRead(SAMRecord read);

    /**
     * A previous selector has deemed this read unfit.  Notify this selector so that this selector's counts are valid.
     *
     * @param read the read previously rejected.
     */
    public void notifyReadRejected(SAMRecord read);

    /**
     * Signal the selector that read additions are complete (e.g. so reservoirs can flush).
     */
    public void complete();

    /**
     * Retrieve the number of reads seen by this selector so far.
     *
     * @return number of reads seen.
     */
    public long getNumReadsSeen();

    /**
     * Return the number of reads accepted by this selector so far.
     *
     * @return number of reads selected.
     */
    public long getNumReadsSelected();

    /**
     * Gets the locus at which the last of the downsampled reads selected by this selector ends.  The value returned will be the
     * last aligned position from this selection to which a downsampled read aligns -- in other words, if a read is thrown out at
     * position 3 whose cigar string is 76M, the value of this parameter will be 78.
     *
     * @return If any read has been downsampled, this will return the last aligned base of the longest alignment.  Else, 0.
     */
    public int getDownsamplingExtent();

    /**
     * Get the reads selected by this selector.
     *
     * @return collection of reads selected by this selector.
     */
    public Collection<SAMRecord> getSelectedReads();

    /**
     * Reset this collection to its pre-gathered state.
     */
    public void reset();
}
/**
 * Select every read passed in.
 */
class AllReadsSelector implements ReadSelector {
    // reads accepted so far, in submission order
    private Collection<SAMRecord> reads = new LinkedList<SAMRecord>();
    // every read shown to this selector, whether accepted here or rejected upstream
    private long readsSeen = 0;
    // furthest aligned end among reads rejected upstream (0 if none rejected)
    private int downsamplingExtent = 0;

    public void submitRead(SAMRecord read) {
        reads.add(read);
        readsSeen++;
    }

    public void notifyReadRejected(SAMRecord read) {
        readsSeen++;
        downsamplingExtent = Math.max(downsamplingExtent, read.getAlignmentEnd());
    }

    public void complete() {
        // NO-OP: reads are stored as they arrive; nothing to finalize.
    }

    public long getNumReadsSeen() {
        return readsSeen;
    }

    public long getNumReadsSelected() {
        // BUGFIX: previously returned readsSeen, which also counted reads rejected
        // upstream (notifyReadRejected bumps readsSeen without storing the read).
        // Per the ReadSelector contract this is the number of reads actually kept.
        return reads.size();
    }

    public int getDownsamplingExtent() {
        return downsamplingExtent;
    }

    public Collection<SAMRecord> getSelectedReads() {
        return reads;
    }

    public void reset() {
        reads.clear();
        readsSeen = 0;
        downsamplingExtent = 0;
    }
}
/**
 * Select N reads randomly from the input stream via a reservoir sampler.
 */
class NRandomReadSelector implements ReadSelector {
    private final LegacyReservoirDownsampler<SAMRecord> reservoir;
    private final ReadSelector chainedSelector;   // downstream selector; may be null
    private long readsSeen = 0;
    private int downsamplingExtent = 0;

    /**
     * @param chainedSelector selector fed with the surviving reads on complete() (may be null).
     * @param readLimit       reservoir capacity: maximum number of reads to keep.
     */
    public NRandomReadSelector(ReadSelector chainedSelector, long readLimit) {
        this.reservoir = new LegacyReservoirDownsampler<SAMRecord>((int) readLimit);
        this.chainedSelector = chainedSelector;
    }

    public void submitRead(SAMRecord read) {
        SAMRecord displaced = reservoir.add(read);
        if (displaced != null) {
            // BUGFIX: the read evicted from the reservoir is the one being rejected.
            // Previously the *incoming* read was used both for the rejection
            // notification and for the downsampling extent, which misreported
            // which read was dropped.
            downsamplingExtent = Math.max(downsamplingExtent, displaced.getAlignmentEnd());
            if (chainedSelector != null)
                chainedSelector.notifyReadRejected(displaced);
        }
        readsSeen++;
    }

    public void notifyReadRejected(SAMRecord read) {
        readsSeen++;
    }

    public void complete() {
        // BUGFIX: guard the forwarding loop as well; previously only the complete()
        // call was null-checked, so a non-null reservoir with no chained selector
        // would throw a NullPointerException in the loop.
        if (chainedSelector != null) {
            for (SAMRecord read : reservoir.getDownsampledContents())
                chainedSelector.submitRead(read);
            chainedSelector.complete();
        }
    }

    public long getNumReadsSeen() {
        return readsSeen;
    }

    public long getNumReadsSelected() {
        return reservoir.size();
    }

    public int getDownsamplingExtent() {
        return downsamplingExtent;
    }

    public Collection<SAMRecord> getSelectedReads() {
        return reservoir.getDownsampledContents();
    }

    public void reset() {
        reservoir.clear();
        downsamplingExtent = 0;
        if (chainedSelector != null)
            chainedSelector.reset();
    }
}
/**
 * Routes each incoming read to a per-sample ReadSelector.
 * Note: stores reads by sample ID string, not by sample object.
 */
class SamplePartitioner implements ReadSelector {
    private final Map<String, ReadSelector> readsBySample;
    private long readsSeen = 0;

    public SamplePartitioner(Map<String, ReadSelector> readSelectors) {
        readsBySample = readSelectors;
    }

    /** Sample ID carried by the read's read group, or null when it has no read group. */
    private String sampleIdOf(SAMRecord read) {
        return read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
    }

    public void submitRead(SAMRecord read) {
        final String sampleName = sampleIdOf(read);
        if (readsBySample.containsKey(sampleName))
            readsBySample.get(sampleName).submitRead(read);
        readsSeen++;
    }

    public void notifyReadRejected(SAMRecord read) {
        final String sampleName = sampleIdOf(read);
        if (readsBySample.containsKey(sampleName))
            readsBySample.get(sampleName).notifyReadRejected(read);
        readsSeen++;
    }

    public void complete() {
        // NO-OP: completion of the per-sample selectors is driven by the caller.
    }

    public long getNumReadsSeen() {
        return readsSeen;
    }

    public long getNumReadsSelected() {
        return readsSeen;
    }

    public int getDownsamplingExtent() {
        int maxExtent = 0;
        for (ReadSelector storage : readsBySample.values())
            maxExtent = Math.max(maxExtent, storage.getDownsamplingExtent());
        return maxExtent;
    }

    public Collection<SAMRecord> getSelectedReads() {
        throw new UnsupportedOperationException("Cannot directly get selected reads from a read partitioner.");
    }

    /** The per-sample selector holding the reads gathered for the given sample. */
    public ReadSelector getSelectedReads(String sampleName) {
        if (!readsBySample.containsKey(sampleName))
            throw new NoSuchElementException("Sample name not found");
        return readsBySample.get(sampleName);
    }

    public void reset() {
        for (ReadSelector storage : readsBySample.values())
            storage.reset();
        readsSeen = 0;
    }
}

View File

@ -1,56 +0,0 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import java.util.Iterator;
/**
 * Base class for iterators that traverse a SAM file and emit information
 * accumulated on a per-locus basis, one AlignmentContext per position.
 */
public abstract class LocusIterator implements Iterable<AlignmentContext>, CloseableIterator<AlignmentContext> {
    /** @return true if another per-locus context is available. */
    public abstract boolean hasNext();

    /** @return the next per-locus AlignmentContext. */
    public abstract AlignmentContext next();

    /** An iterator over alignment contexts serves as its own Iterable. */
    public Iterator<AlignmentContext> iterator() {
        return this;
    }

    /** The base implementation holds no closeable resources, so this is a no-op. */
    public void close() {
    }

    /** Removal has no meaning for a locus traversal. */
    public void remove() {
        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
    }
}

View File

@ -1,661 +0,0 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.picard.util.PeekableIterator;
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.downsampling.*;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.ReadUtils;
import java.util.*;
/**
* Iterator that traverses a SAM File, accumulating information on a per-locus basis
*/
public class LocusIteratorByState extends LocusIterator {
/**
* our log, which we want to capture anything from this class
*/
private static Logger logger = Logger.getLogger(LegacyLocusIteratorByState.class);
// -----------------------------------------------------------------------------------------------------------------
//
// member fields
//
// -----------------------------------------------------------------------------------------------------------------
/**
* Used to create new GenomeLocs.
*/
private final GenomeLocParser genomeLocParser;
private final ArrayList<String> samples;
private final ReadStateManager readStates;
/**
 * Tracks a single read's walk along the reference: which read base, which CIGAR
 * element, and which genome offset the traversal is currently positioned on.
 */
protected static class SAMRecordState {
    SAMRecord read;                  // the read being traversed
    int readOffset = -1;             // how far are we offset from the start of the read bases?
    int genomeOffset = -1;           // how far are we offset from the alignment start on the genome?

    Cigar cigar = null;              // the read's CIGAR
    int cigarOffset = -1;            // index of the current CIGAR element
    CigarElement curElement = null;  // the current CIGAR element itself
    int nCigarElements = 0;          // total number of CIGAR elements

    int cigarElementCounter = -1;    // how far are we into a single cigarElement

    // The logical model for generating extended events is as follows: the "record state" implements the traversal
    // along the reference; thus stepForwardOnGenome() returns on every and only on actual reference bases. This
    // can be a (mis)match or a deletion (in the latter case, we still return on every individual reference base the
    // deletion spans). In the extended events mode, the record state also remembers if there was an insertion, or
    // if the deletion just started *right before* the current reference base the record state is pointing to upon the return from
    // stepForwardOnGenome(). The next call to stepForwardOnGenome() will clear that memory (as we remember only extended
    // events immediately preceding the current reference base).

    public SAMRecordState(SAMRecord read) {
        this.read = read;
        cigar = read.getCigar();
        nCigarElements = cigar.numCigarElements();
        //System.out.printf("Creating a SAMRecordState: %s%n", this);
    }

    public SAMRecord getRead() {
        return read;
    }

    /**
     * What is our current offset in the read's bases that aligns us with the reference genome?
     *
     * @return the 0-based offset into the read bases (-1 before the first step)
     */
    public int getReadOffset() {
        return readOffset;
    }

    /**
     * What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
     *
     * @return the 0-based offset from the alignment start (-1 before the first step)
     */
    public int getGenomeOffset() {
        return genomeOffset;
    }

    /** Absolute 1-based reference position currently pointed to. */
    public int getGenomePosition() {
        return read.getAlignmentStart() + getGenomeOffset();
    }

    /** Current position as a single-base GenomeLoc on the read's contig. */
    public GenomeLoc getLocation(GenomeLocParser genomeLocParser) {
        return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
    }

    public CigarOperator getCurrentCigarOperator() {
        return curElement.getOperator();
    }

    public String toString() {
        return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement);
    }

    /** The CIGAR element that applies after the current one is exhausted (or the current one if none follows). */
    public CigarElement peekForwardOnGenome() {
        return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement );
    }

    /**
     * The CIGAR element preceding the current one (or the current one at a boundary).
     * NOTE(review): the guard uses 'cigarOffset - 1 > 0', which can never yield element 0;
     * confirm whether '>= 0' was intended.
     */
    public CigarElement peekBackwardOnGenome() {
        return ( cigarElementCounter - 1 == 0 && cigarOffset - 1 > 0 ? cigar.getCigarElement(cigarOffset - 1) : curElement );
    }

    /**
     * Advances this state by exactly one reference base (or consumes non-reference
     * CIGAR elements until a reference base is reached).
     *
     * @return the CIGAR operator covering the new position, or null once the traversal
     *         has stepped past the end of the read.
     */
    public CigarOperator stepForwardOnGenome() {
        // we enter this method with readOffset = index of the last processed base on the read
        // (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion
        if (curElement == null || ++cigarElementCounter > curElement.getLength()) {
            cigarOffset++;
            if (cigarOffset < nCigarElements) {
                curElement = cigar.getCigarElement(cigarOffset);
                cigarElementCounter = 0;
                // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
                // we reenter in order to re-check cigarElementCounter against curElement's length
                return stepForwardOnGenome();
            } else {
                if (curElement != null && curElement.getOperator() == CigarOperator.D)
                    throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");

                // Reads that contain indels model the genomeOffset as the following base in the reference.  Because
                // we fall into this else block only when indels end the read, increment genomeOffset  such that the
                // current offset of this read is the next ref base after the end of the indel.  This position will
                // model a point on the reference somewhere after the end of the read.
                genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:
                // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
                return null;
            }
        }

        boolean done = false;
        switch (curElement.getOperator()) {
            case H: // ignore hard clips
            case P: // ignore pads
                cigarElementCounter = curElement.getLength();
                break;
            case I: // insertion w.r.t. the reference
            case S: // soft clip
                cigarElementCounter = curElement.getLength();
                readOffset += curElement.getLength();
                break;
            case D: // deletion w.r.t. the reference
                if (readOffset < 0)             // we don't want reads starting with deletion, this is a malformed cigar string
                    throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");
                // should be the same as N case
                genomeOffset++;
                done = true;
                break;
            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
                genomeOffset++;
                done = true;
                break;
            case M:
            case EQ:
            case X:
                readOffset++;
                genomeOffset++;
                done = true;
                break;
            default:
                throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator());
        }

        return done ? curElement.getOperator() : stepForwardOnGenome();
    }
}
//final boolean DEBUG = false;
//final boolean DEBUG2 = false && DEBUG;
private ReadProperties readInfo;
private AlignmentContext nextAlignmentContext;
private boolean performDownsampling;
// -----------------------------------------------------------------------------------------------------------------
//
// constructors and other basic operations
//
// -----------------------------------------------------------------------------------------------------------------
/**
 * Creates a locus iterator over the given coordinate-sorted read stream.
 *
 * NOTE: statement order below matters — performDownsampling must be assigned before
 * the ReadStateManager is constructed, because its SamplePartitioner reads that flag.
 *
 * @param samIterator     source of reads
 * @param readInformation traversal/read properties, including the downsampling method
 * @param genomeLocParser used to create new GenomeLocs
 * @param samples         all sample IDs expected in the stream; may be empty only when
 *                        the iterator has no reads
 */
public LocusIteratorByState(final Iterator<SAMRecord> samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection<String> samples) {
    this.readInfo = readInformation;
    this.genomeLocParser = genomeLocParser;
    this.samples = new ArrayList<String>(samples);

    // LIBS will invoke the Reservoir and Leveling downsamplers on the read stream if we're
    // downsampling to coverage by sample. SAMDataSource will have refrained from applying
    // any downsamplers to the read stream in this case, in the expectation that LIBS will
    // manage the downsampling. The reason for this is twofold: performance (don't have to
    // split/re-assemble the read stream in SAMDataSource), and to enable partial downsampling
    // of reads (eg., using half of a read, and throwing the rest away).
    this.performDownsampling = readInfo.getDownsamplingMethod() != null &&
            readInfo.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE &&
            readInfo.getDownsamplingMethod().toCoverage != null;

    this.readStates = new ReadStateManager(samIterator);

    // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
    // there's no read data.  So we need to throw this error only when samIterator.hasNext() is true
    if (this.samples.isEmpty() && samIterator.hasNext()) {
        throw new IllegalArgumentException("samples list must not be empty");
    }
}
/**
 * For testing only.  Assumes that the incoming SAMRecords have no read groups,
 * so creates a dummy sample list for the system.
 */
public final static Collection<String> sampleListForSAMWithoutReadGroups() {
    final List<String> dummySamples = new ArrayList<String>();
    dummySamples.add(null);   // the lone "null" sample: reads lacking a read group map here
    return dummySamples;
}
/** An iterator over alignment contexts serves as its own Iterable. */
public Iterator<AlignmentContext> iterator() {
    return this;
}
/** No-op: this iterator holds no closeable resources of its own. */
public void close() {
    //this.it.close();
}
/** @return true if another per-locus alignment context can be produced. */
public boolean hasNext() {
    // materialize the next context if it isn't already loaded (no-op otherwise)
    lazyLoadNextAlignmentContext();
    return nextAlignmentContext != null;
}
/** Current traversal position, derived from the left-most pending read state; null when empty. */
private GenomeLoc getLocation() {
    if (readStates.isEmpty())
        return null;
    return readStates.getFirst().getLocation(genomeLocParser);
}
// -----------------------------------------------------------------------------------------------------------------
//
// next() routine and associated collection operations
//
// -----------------------------------------------------------------------------------------------------------------
/**
 * Returns the next per-locus alignment context, consuming it so the following
 * call advances.
 *
 * @throws NoSuchElementException when the traversal is exhausted.
 */
public AlignmentContext next() {
    // hasNext() performs the lazy load of nextAlignmentContext for us
    if (!hasNext())
        throw new NoSuchElementException("LocusIteratorByState: out of elements.");
    final AlignmentContext context = nextAlignmentContext;
    nextAlignmentContext = null;   // mark consumed
    return context;
}
/**
 * Creates the next alignment context from the given state.  Note that this is implemented as a lazy load method.
 * nextAlignmentContext MUST BE null in order for this method to advance to the next entry.
 *
 * Each outer-loop pass pulls pending reads, builds a per-sample pileup at the current
 * location, then steps every read state forward; the loop repeats (skipping loci covered
 * only by D/N operators) until a non-empty pileup is produced or reads run out.
 */
private void lazyLoadNextAlignmentContext() {
    while (nextAlignmentContext == null && readStates.hasNext()) {
        readStates.collectPendingReads();

        final GenomeLoc location = getLocation();
        final Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();
        // TODO: How can you determine here whether the current pileup has been downsampled?
        boolean hasBeenSampled = false;

        for (final String sample : samples) {
            final Iterator<SAMRecordState> iterator = readStates.iterator(sample);
            final List<PileupElement> pile = new ArrayList<PileupElement>(readStates.size(sample));

            int size = 0;                                                           // number of elements in this sample's pileup
            int nDeletions = 0;                                                     // number of deletions in this sample's pileup
            int nMQ0Reads = 0;                                                      // number of MQ0 reads in this sample's pileup (warning: current implementation includes N bases that are MQ0)

            while (iterator.hasNext()) {
                final SAMRecordState state = iterator.next();                       // state object with the read/offset information
                final GATKSAMRecord read = (GATKSAMRecord) state.getRead();         // the actual read
                final CigarOperator op = state.getCurrentCigarOperator();           // current cigar operator
                final CigarElement nextElement = state.peekForwardOnGenome();       // next cigar element
                final CigarElement lastElement = state.peekBackwardOnGenome();      // last cigar element
                final boolean isSingleElementCigar = nextElement == lastElement;
                final CigarOperator nextOp = nextElement.getOperator();             // next cigar operator
                final CigarOperator lastOp = lastElement.getOperator();             // last cigar operator
                int readOffset = state.getReadOffset();                             // the base offset on this read

                final boolean isBeforeDeletion = nextOp == CigarOperator.DELETION;
                final boolean isAfterDeletion = lastOp == CigarOperator.DELETION;
                final boolean isBeforeInsertion = nextOp == CigarOperator.INSERTION;
                final boolean isAfterInsertion = lastOp == CigarOperator.INSERTION && !isSingleElementCigar;
                final boolean isNextToSoftClip = nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart());

                int nextElementLength = nextElement.getLength();

                if (op == CigarOperator.N)                                          // N's are never added to any pileup
                    continue;

                if (op == CigarOperator.D) {
                    // TODO -- LIBS is totally busted for deletions so that reads with Ds right before Is in their CIGAR are broken; must fix
                    if (readInfo.includeReadsWithDeletionAtLoci()) {                // only add deletions to the pileup if we are authorized to do so
                        pile.add(new PileupElement(read, readOffset, true, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, nextOp == CigarOperator.D ? nextElementLength : -1));
                        size++;
                        nDeletions++;
                        if (read.getMappingQuality() == 0)
                            nMQ0Reads++;
                    }
                }
                else {
                    if (!filterBaseInRead(read, location.getStart())) {
                        String insertedBaseString = null;
                        if (nextOp == CigarOperator.I) {
                            final int insertionOffset = isSingleElementCigar ? 0 : 1;
                            // TODO -- someone please implement a better fix for the single element insertion CIGAR!
                            if (isSingleElementCigar)
                                readOffset -= (nextElement.getLength() - 1); // LIBS has passed over the insertion bases!
                            insertedBaseString = new String(Arrays.copyOfRange(read.getReadBases(), readOffset + insertionOffset, readOffset + insertionOffset + nextElement.getLength()));
                        }

                        pile.add(new PileupElement(read, readOffset, false, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, insertedBaseString, nextElementLength));
                        size++;
                        if (read.getMappingQuality() == 0)
                            nMQ0Reads++;
                    }
                }
            }

            if (pile.size() != 0)                                                   // if this pileup added at least one base, add it to the full pileup
                fullPileup.put(sample, new ReadBackedPileupImpl(location, pile, size, nDeletions, nMQ0Reads));
        }

        updateReadStates(); // critical - must be called after we get the current state offsets and location
        if (!fullPileup.isEmpty())                                                  // if we got reads with non-D/N over the current position, we are done
            nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), hasBeenSampled);
    }
}
/**
 * Fast position test: does the given read begin strictly after the manager's
 * current traversal position (later contig, or later start on the same contig)?
 * Always false when no read states are pending.
 */
private boolean readIsPastCurrentPosition(SAMRecord read) {
    if (readStates.isEmpty())
        return false;

    final SAMRecordState firstState = readStates.getFirst();
    final SAMRecord firstRead = firstState.getRead();
    return read.getReferenceIndex() > firstRead.getReferenceIndex()
            || read.getAlignmentStart() > firstState.getGenomePosition();
}
/**
 * Generic place to put per-base filters appropriate to LocusIteratorByState.
 *
 * @param rec the read whose base is being considered
 * @param pos 1-based reference position of the base
 * @return true if the base should be excluded from the pileup
 *         (currently: the base lies inside the adaptor sequence)
 */
private static boolean filterBaseInRead(GATKSAMRecord rec, long pos) {
    return ReadUtils.isBaseInsideAdaptor(rec, pos);
}
/**
 * Advances every live read state one position along the genome, discarding states
 * that have walked off the end of their read.
 */
private void updateReadStates() {
    for (final String sample : samples) {
        final Iterator<SAMRecordState> it = readStates.iterator(sample);
        while (it.hasNext()) {
            final SAMRecordState state = it.next();
            // stepForwardOnGenome() yields null only once we are past the read's end
            // AND any trailing indel has already been processed, so discarding the
            // state here is safe; a repeated call would return null again.
            if (state.stepForwardOnGenome() == null)
                it.remove();
        }
    }
}
/** Removal has no meaning for a locus traversal. */
public void remove() {
    throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
}
protected class ReadStateManager {
private final PeekableIterator<SAMRecord> iterator;
private final SamplePartitioner samplePartitioner;
private final Map<String, PerSampleReadStateManager> readStatesBySample = new HashMap<String, PerSampleReadStateManager>();
private int totalReadStates = 0;
/**
 * Wraps the read source in a peekable iterator, creates one per-sample state
 * manager for every known sample, and builds the sample partitioner (which
 * applies downsampling when the enclosing iterator has it enabled).
 */
public ReadStateManager(Iterator<SAMRecord> source) {
    this.iterator = new PeekableIterator<SAMRecord>(source);

    for (final String sample : samples) {
        readStatesBySample.put(sample, new PerSampleReadStateManager());
    }

    samplePartitioner = new SamplePartitioner(performDownsampling);
}
/**
 * Returns an iterator over all the reads associated with the given sample.
 *
 * The anonymous wrapper in the original delegated hasNext/next/remove 1:1 to the
 * per-sample iterator, so we hand back the underlying iterator directly.
 * NOTE(review): the original javadoc claimed that remove() decrements the total
 * read-state count, but no such decrement happens in the delegated remove() —
 * confirm callers do not rely on that.
 *
 * @param sample The sample.
 * @return Iterator over the reads associated with that sample.
 */
public Iterator<SAMRecordState> iterator(final String sample) {
    return readStatesBySample.get(sample).iterator();
}
/** True when no sample holds any pending read state. */
public boolean isEmpty() {
    return totalReadStates == 0;
}
/**
 * Retrieves the total number of reads in the manager across all samples.
 *
 * @return Total number of reads over all samples.
 */
public int size() {
    return totalReadStates;
}
/**
 * Retrieves the total number of reads in the manager in the given sample.
 *
 * @param sample The sample.
 * @return Total number of reads in the given sample.
 */
public int size(final String sample) {
    return readStatesBySample.get(sample).size();
}
/**
 * The left-most pending read state across all samples (samples are scanned in
 * their declared order), or null when every sample is empty.
 */
public SAMRecordState getFirst() {
    for (final String sample : samples) {
        final PerSampleReadStateManager perSampleStates = readStatesBySample.get(sample);
        if (!perSampleStates.isEmpty())
            return perSampleStates.peek();
    }
    return null;   // no sample has any pending read state
}
/** True while read states remain or more reads can still be pulled from the source. */
public boolean hasNext() {
    return totalReadStates > 0 || iterator.hasNext();
}
/**
 * Pulls reads from the underlying iterator and hangs them on the per-sample state
 * managers.  When the manager is empty, every read sharing the first contig +
 * alignment start is consumed; otherwise reads are consumed until the next read
 * would start past the current traversal position.
 */
public void collectPendingReads() {
    if (!iterator.hasNext())
        return;

    // NOTE: 'readStates' here resolves to the enclosing iterator's field, which is this
    // very manager — so this asks whether the manager currently holds no read states.
    if (readStates.size() == 0) {
        int firstContigIndex = iterator.peek().getReferenceIndex();
        int firstAlignmentStart = iterator.peek().getAlignmentStart();
        while (iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) {
            samplePartitioner.submitRead(iterator.next());
        }
    } else {
        // Fast fail in the case that the read is past the current position.
        if (readIsPastCurrentPosition(iterator.peek()))
            return;

        while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) {
            samplePartitioner.submitRead(iterator.next());
        }
    }
    // finalize partitioning before harvesting the per-sample read sets
    samplePartitioner.doneSubmittingReads();

    for (final String sample : samples) {
        Collection<SAMRecord> newReads = samplePartitioner.getReadsForSample(sample);

        PerSampleReadStateManager statesBySample = readStatesBySample.get(sample);
        addReadsToSample(statesBySample, newReads);
    }

    samplePartitioner.reset();
}
/**
 * Add reads with the given sample name to the given hanger entry.
 *
 * @param readStates The list of read states to add this collection of reads.
 * @param reads      Reads to add.  Selected reads will be pulled from this source.
 */
private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<SAMRecord> reads) {
    if (reads.isEmpty())
        return;

    final Collection<SAMRecordState> freshStates = new LinkedList<SAMRecordState>();

    for (final SAMRecord read : reads) {
        final SAMRecordState state = new SAMRecordState(read);
        state.stepForwardOnGenome();   // position the state on its first reference base
        freshStates.add(state);
    }

    readStates.addStatesAtNextAlignmentStart(freshStates);
}
/**
 * Manages the read states for a single sample, grouped by alignment start.
 *
 * Each inner LinkedList holds the states sharing one alignment start; the outer list
 * is ordered by alignment start. Maintains both a per-sample count
 * (thisSampleReadStates) and the enclosing class's totalReadStates, which must be
 * kept in sync through every add/downsample/remove operation.
 */
protected class PerSampleReadStateManager implements Iterable<SAMRecordState> {
    // outer list: one entry per alignment start; inner list: states at that start
    private List<LinkedList<SAMRecordState>> readStatesByAlignmentStart = new LinkedList<LinkedList<SAMRecordState>>();
    // running count of states held for this sample (mirrors additions/removals below)
    private int thisSampleReadStates = 0;
    // optional leveling downsampler, enabled only when downsampling is configured;
    // operates on whole per-alignment-start groups
    private Downsampler<LinkedList<SAMRecordState>> levelingDownsampler =
        performDownsampling ?
        new LevelingDownsampler<LinkedList<SAMRecordState>, SAMRecordState>(readInfo.getDownsamplingMethod().toCoverage) :
        null;
    /**
     * Appends a group of states sharing the next alignment start, updating both the
     * per-sample and global counters, and (if enabled) runs the leveling downsampler
     * over all buffered groups, subtracting any discarded states from both counters.
     *
     * @param states states to add; an empty collection is a no-op
     */
    public void addStatesAtNextAlignmentStart(Collection<SAMRecordState> states) {
        if ( states.isEmpty() ) {
            return;
        }
        readStatesByAlignmentStart.add(new LinkedList<SAMRecordState>(states));
        thisSampleReadStates += states.size();
        totalReadStates += states.size();
        if ( levelingDownsampler != null ) {
            // submit the whole buffer, finalize, then reconcile counters with whatever
            // the downsampler discarded
            levelingDownsampler.submit(readStatesByAlignmentStart);
            levelingDownsampler.signalEndOfInput();
            thisSampleReadStates -= levelingDownsampler.getNumberOfDiscardedItems();
            totalReadStates -= levelingDownsampler.getNumberOfDiscardedItems();
            // use returned List directly rather than make a copy, for efficiency's sake
            readStatesByAlignmentStart = levelingDownsampler.consumeFinalizedItems();
            levelingDownsampler.reset();
        }
    }
    /** @return true if no read states are buffered for this sample */
    public boolean isEmpty() {
        return readStatesByAlignmentStart.isEmpty();
    }
    /** @return the first state of the earliest alignment-start group, or null if empty */
    public SAMRecordState peek() {
        return isEmpty() ? null : readStatesByAlignmentStart.get(0).peek();
    }
    /** @return the number of read states currently held for this sample */
    public int size() {
        return thisSampleReadStates;
    }
    /**
     * Iterates over all states in alignment-start order. The iterator's remove()
     * also decrements both the per-sample and global counters and drops any
     * alignment-start group that becomes empty.
     */
    public Iterator<SAMRecordState> iterator() {
        return new Iterator<SAMRecordState>() {
            private Iterator<LinkedList<SAMRecordState>> alignmentStartIterator = readStatesByAlignmentStart.iterator();
            private LinkedList<SAMRecordState> currentPositionReadStates = null;
            private Iterator<SAMRecordState> currentPositionReadStatesIterator = null;
            public boolean hasNext() {
                return alignmentStartIterator.hasNext() ||
                       (currentPositionReadStatesIterator != null && currentPositionReadStatesIterator.hasNext());
            }
            public SAMRecordState next() {
                // advance to the next alignment-start group when the current one is drained
                if ( currentPositionReadStatesIterator == null || ! currentPositionReadStatesIterator.hasNext() ) {
                    currentPositionReadStates = alignmentStartIterator.next();
                    currentPositionReadStatesIterator = currentPositionReadStates.iterator();
                }
                return currentPositionReadStatesIterator.next();
            }
            public void remove() {
                currentPositionReadStatesIterator.remove();
                // keep both counters consistent with the removal
                thisSampleReadStates--;
                totalReadStates--;
                // drop the group entirely once its last state is removed
                if ( currentPositionReadStates.isEmpty() ) {
                    alignmentStartIterator.remove();
                }
            }
        };
    }
}
}
/**
 * Divides reads by sample and (if requested) does a preliminary downsampling pass with a ReservoirDownsampler.
 *
 * Note: stores reads by sample ID string, not by sample object
 */
private class SamplePartitioner {
    // one downsampler per sample, acting as that sample's read buffer;
    // populated once in the constructor and never rebound afterwards
    private final Map<String, Downsampler<SAMRecord>> readsBySample;

    /**
     * @param downsampleReads if true, cap each sample's buffered reads at the configured
     *                        coverage via a ReservoirDownsampler; otherwise pass all reads through
     */
    public SamplePartitioner( boolean downsampleReads ) {
        readsBySample = new HashMap<String, Downsampler<SAMRecord>>();
        for ( String sample : samples ) {
            readsBySample.put(sample,
                              downsampleReads ? new ReservoirDownsampler<SAMRecord>(readInfo.getDownsamplingMethod().toCoverage) :
                                                new PassThroughDownsampler<SAMRecord>());
        }
    }

    /**
     * Routes a read to its sample's downsampler. The sample name may be null when the
     * read has no read group; reads whose sample is not in the sample set are silently dropped.
     *
     * @param read the read to partition
     */
    public void submitRead(SAMRecord read) {
        String sampleName = read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
        // single lookup instead of containsKey()+get(); values are never null (see constructor)
        Downsampler<SAMRecord> downsampler = readsBySample.get(sampleName);
        if (downsampler != null)
            downsampler.submit(read);
    }

    /**
     * Signals every per-sample downsampler that the current batch is complete,
     * allowing it to finalize its selection.
     */
    public void doneSubmittingReads() {
        for ( final Downsampler<SAMRecord> downsampler : readsBySample.values() ) {
            downsampler.signalEndOfInput();
        }
    }

    /**
     * Consumes and returns the finalized reads for the given sample.
     *
     * @param sampleName the sample whose reads to retrieve
     * @return the finalized reads for that sample
     * @throws NoSuchElementException if the sample is not in the sample set
     */
    public Collection<SAMRecord> getReadsForSample(String sampleName) {
        final Downsampler<SAMRecord> downsampler = readsBySample.get(sampleName);
        if ( downsampler == null )
            throw new NoSuchElementException("Sample name not found: " + sampleName);
        return downsampler.consumeFinalizedItems();
    }

    /**
     * Clears and resets every per-sample downsampler, readying the partitioner
     * for the next batch of reads.
     */
    public void reset() {
        for ( final Downsampler<SAMRecord> downsampler : readsBySample.values() ) {
            downsampler.clear();
            downsampler.reset();
        }
    }
}
}

View File

@ -61,6 +61,7 @@ public class RefMetaDataTracker {
final Map<String, RODRecordList> bindings;
final protected static Logger logger = Logger.getLogger(RefMetaDataTracker.class);
public final static RefMetaDataTracker EMPTY_TRACKER = new RefMetaDataTracker();
// ------------------------------------------------------------------------------------------
//

View File

@ -194,17 +194,18 @@ public class VariantContextAdaptors {
return null; // we weren't given enough reference context to create the VariantContext
final byte refBaseForIndel = ref.getBases()[index];
final boolean refBaseIsDash = dbsnp.getNCBIRefBase().equals("-");
boolean addPaddingBase;
if ( isSNP(dbsnp) || isMNP(dbsnp) )
addPaddingBase = false;
else if ( isIndel(dbsnp) || dbsnp.getVariantType().contains("mixed") )
addPaddingBase = VariantContextUtils.requiresPaddingBase(stripNullDashes(getAlleleList(dbsnp)));
addPaddingBase = refBaseIsDash || VariantContextUtils.requiresPaddingBase(stripNullDashes(getAlleleList(dbsnp)));
else
return null; // can't handle anything else
Allele refAllele;
if ( dbsnp.getNCBIRefBase().equals("-") )
if ( refBaseIsDash )
refAllele = Allele.create(refBaseForIndel, true);
else if ( ! Allele.acceptableAlleleBases(dbsnp.getNCBIRefBase()) )
return null;

View File

@ -74,7 +74,7 @@ public abstract class TraversalEngine<M,T,WalkerType extends Walker<M,T>,Provide
* @param engine GenomeAnalysisEngine for this traversal
* @param progressMeter An optional (null == optional) meter to track our progress
*/
public void initialize(final GenomeAnalysisEngine engine, final ProgressMeter progressMeter) {
public void initialize(final GenomeAnalysisEngine engine, final Walker walker, final ProgressMeter progressMeter) {
if ( engine == null )
throw new ReviewedStingException("BUG: GenomeAnalysisEngine cannot be null!");
@ -87,8 +87,8 @@ public abstract class TraversalEngine<M,T,WalkerType extends Walker<M,T>,Provide
*
* @param engine
*/
protected void initialize(final GenomeAnalysisEngine engine) {
initialize(engine, null);
protected void initialize(final GenomeAnalysisEngine engine, final Walker walker) {
initialize(engine, walker, null);
}
/**

View File

@ -25,11 +25,14 @@
package org.broadinstitute.sting.gatk.traversals;
import com.google.java.contract.Requires;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.WalkerManager;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.datasources.providers.*;
import org.broadinstitute.sting.gatk.datasources.reads.Shard;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.ActiveRegionExtension;
import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker;
@ -39,31 +42,182 @@ import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.activeregion.ActiveRegion;
import org.broadinstitute.sting.utils.activeregion.ActivityProfile;
import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.progressmeter.ProgressMeter;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.*;
/**
* Created by IntelliJ IDEA.
* User: rpoplin
* Date: 12/9/11
* Implement active region traversal
*
* User: depristo
* Date: 1/9/13
* Time: 4:45 PM
*
* Live region:
*
* The ART tracks a thing called the live region. The live region is a position on a specific contig
* of the alignment start of the last read we processed during this traversal. Because the
* read stream is sorted, future reads must occur in the live region. Therefore the dead region
* (everything to the left of the live boundary) cannot have any more read data. The live / dead
* regions are used to decide when we can safely call map on active regions, as only active regions
* contained completely within the dead region (including extensions) have a complete set of read data
* in the collected read list. All of the data related to the live region is captured by the local
* variable spanOfLastReadSeen
*
*/
public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegionWalker<M,T>,LocusShardDataProvider> {
/**
* our log, which we want to capture anything from this class
*/
public class TraverseActiveRegions<M, T> extends TraversalEngine<M,T,ActiveRegionWalker<M,T>,LocusShardDataProvider> {
protected final static Logger logger = Logger.getLogger(TraversalEngine.class);
protected final static boolean DEBUG = false;
// set by the tranversal
private int activeRegionExtension = -1;
private int maxRegionSize = -1;
private final LinkedList<ActiveRegion> workQueue = new LinkedList<ActiveRegion>();
private final LinkedHashSet<GATKSAMRecord> myReads = new LinkedHashSet<GATKSAMRecord>();
private LinkedList<GATKSAMRecord> myReads = new LinkedList<GATKSAMRecord>();
private GenomeLoc spanOfLastReadSeen = null;
protected int getActiveRegionExtension() {
return activeRegionExtension;
}
protected int getMaxRegionSize() {
return maxRegionSize;
}
@Override
public String getTraversalUnits() {
return "active regions";
}
@Override
public String toString() {
return "TraverseActiveRegions";
}
@Override
public void initialize(GenomeAnalysisEngine engine, Walker walker, ProgressMeter progressMeter) {
super.initialize(engine, walker, progressMeter);
activeRegionExtension = walker.getClass().getAnnotation(ActiveRegionExtension.class).extension();
maxRegionSize = walker.getClass().getAnnotation(ActiveRegionExtension.class).maxRegion();
final ActiveRegionWalker arWalker = (ActiveRegionWalker)walker;
if ( arWalker.wantsExtendedReads() && ! arWalker.wantsNonPrimaryReads() ) {
throw new IllegalArgumentException("Active region walker " + arWalker + " requested extended events but not " +
"non-primary reads, an inconsistent state. Please modify the walker");
}
}
/**
* Is the loc outside of the intervals being requested for processing by the GATK?
* @param loc
* @return
*/
protected boolean outsideEngineIntervals(final GenomeLoc loc) {
return engine.getIntervals() != null && ! engine.getIntervals().overlaps(loc);
}
/**
* Take the individual isActive calls and integrate them into contiguous active regions and
* add these blocks of work to the work queue
* band-pass filter the list of isActive probabilities and turn into active regions
*
* @param profile
* @param activeRegions
* @return
*/
protected ActivityProfile incorporateActiveRegions(final ActivityProfile profile,
final List<ActiveRegion> activeRegions) {
if ( profile.isEmpty() )
throw new IllegalStateException("trying to incorporate an empty active profile " + profile);
final ActivityProfile bandPassFiltered = profile.bandPassFilter();
activeRegions.addAll(bandPassFiltered.createActiveRegions( getActiveRegionExtension(), getMaxRegionSize() ));
return new ActivityProfile( engine.getGenomeLocParser(), profile.hasPresetRegions() );
}
protected final ActivityProfileResult walkerActiveProb(final ActiveRegionWalker<M, T> walker,
final RefMetaDataTracker tracker, final ReferenceContext refContext,
final AlignmentContext locus, final GenomeLoc location) {
if ( walker.hasPresetActiveRegions() ) {
return new ActivityProfileResult(location, walker.presetActiveRegions.overlaps(location) ? 1.0 : 0.0);
} else {
return walker.isActive( tracker, refContext, locus );
}
}
protected ReferenceOrderedView getReferenceOrderedView(final ActiveRegionWalker<M, T> walker,
final LocusShardDataProvider dataProvider,
final LocusView locusView) {
if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA )
return new ManagingReferenceOrderedView( dataProvider );
else
return (RodLocusView)locusView;
}
/**
* Write out each active region to the walker activeRegionOutStream
*
* @param walker
*/
protected void writeActiveRegionsToStream( final ActiveRegionWalker<M, T> walker ) {
// Just want to output the active regions to a file, not actually process them
for( final ActiveRegion activeRegion : workQueue ) {
if( activeRegion.isActive ) {
walker.activeRegionOutStream.println( activeRegion.getLocation() );
}
}
}
/**
* Did read appear in the last shard?
*
* When we transition across shard boundaries we see duplicate reads because
* each shard contains the reads that *overlap* the shard. So if we just finished
* shard 1-1000 and are now in 1001-2000 we'll see duplicate reads from 1001
* that overlapped 1-1000. This function tests read to determine if we would have
* seen it before by asking if read.getAlignmentStart() is less than the
* stop position of the last seen read at the start of the traversal. The reason
* we need to use the location of the last read at the start of the traversal
* is that we update the lastRead during the traversal, and we only want to filter
* out reads whose start is before the last read of the previous shard, not the
* current shard.
*
* @param locOfLastReadAtTraversalStart the location of the last read seen at the start of the traversal
* @param read the read we want to test if it's already been seen in the last shard
* @return true if read would have appeared in the last shard, false otherwise
*/
protected boolean appearedInLastShard(final GenomeLoc locOfLastReadAtTraversalStart, final GATKSAMRecord read) {
if ( locOfLastReadAtTraversalStart == null )
// we're in the first shard, so obviously the answer is no
return false;
else {
// otherwise check to see if the alignment occurred in the previous shard
return read.getAlignmentStart() <= locOfLastReadAtTraversalStart.getStart()
// we're on the same contig
&& read.getReferenceIndex() == locOfLastReadAtTraversalStart.getContigIndex();
}
}
// -------------------------------------------------------------------------------------
//
// Actual traverse function
//
// -------------------------------------------------------------------------------------
/**
* Is the current shard on a new contig w.r.t. the previous shard?
* @param currentShard the current shard we are processing
* @return true if the last shard was on a different contig than the current shard
*/
private boolean onNewContig(final Shard currentShard) {
return spanOfLastSeenRead() != null
&& spanOfLastSeenRead().getContigIndex() != currentShard.getLocation().getContigIndex();
}
@Override
public T traverse( final ActiveRegionWalker<M,T> walker,
final LocusShardDataProvider dataProvider,
@ -73,16 +227,19 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
final LocusView locusView = new AllLocusView(dataProvider);
final LocusReferenceView referenceView = new LocusReferenceView( walker, dataProvider );
final int activeRegionExtension = walker.getClass().getAnnotation(ActiveRegionExtension.class).extension();
final int maxRegionSize = walker.getClass().getAnnotation(ActiveRegionExtension.class).maxRegion();
int minStart = Integer.MAX_VALUE;
final List<ActiveRegion> activeRegions = new LinkedList<ActiveRegion>();
ActivityProfile profile = new ActivityProfile(engine.getGenomeLocParser(), walker.hasPresetActiveRegions() );
ReferenceOrderedView referenceOrderedDataView = getReferenceOrderedView(walker, dataProvider, locusView);
// We keep processing while the next reference location is within the interval
final GenomeLoc locOfLastReadAtTraversalStart = spanOfLastSeenRead();
// if we've moved onto a new contig, process all of the active regions
if ( onNewContig(dataProvider.getShard()) )
sum = processActiveRegions(walker, sum, true);
GenomeLoc prevLoc = null;
while( locusView.hasNext() ) {
final AlignmentContext locus = locusView.next();
@ -91,17 +248,15 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
// Grab all the previously unseen reads from this pileup and add them to the massive read list
// Note that this must occur before we leave because we are outside the intervals because
// reads may occur outside our intervals but overlap them in the future
// TODO -- this whole HashSet logic should be changed to a linked list of reads with
// TODO -- subsequent pass over them to find the ones overlapping the active regions
for( final PileupElement p : locus.getBasePileup() ) {
final GATKSAMRecord read = p.getRead();
if( !myReads.contains(read) ) {
final Collection<GATKSAMRecord> reads = locusView.getLIBS().transferReadsFromAllPreviousPileups();
for( final GATKSAMRecord read : reads ) {
if ( appearedInLastShard(locOfLastReadAtTraversalStart, read) ) {
if ( DEBUG ) logger.warn("Skipping duplicated " + read.getReadName());
} else {
if ( DEBUG ) logger.warn("Adding read " + read.getReadName() + " at " + engine.getGenomeLocParser().createGenomeLoc(read) + " from provider " + dataProvider);
rememberLastReadLocation(read);
myReads.add(read);
}
// If this is the last pileup for this shard calculate the minimum alignment start so that we know
// which active regions in the work queue are now safe to process
minStart = Math.min(minStart, read.getAlignmentStart());
}
// skip this location -- it's not part of our engine intervals
@ -110,7 +265,7 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
if ( prevLoc != null && location.getStart() != prevLoc.getStop() + 1 ) {
// we've move across some interval boundary, restart profile
profile = incorporateActiveRegions(profile, activeRegions, activeRegionExtension, maxRegionSize);
profile = incorporateActiveRegions(profile, activeRegions);
}
dataProvider.getShard().getReadMetrics().incrementNumIterations();
@ -133,7 +288,7 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
updateCumulativeMetrics(dataProvider.getShard());
if ( ! profile.isEmpty() )
incorporateActiveRegions(profile, activeRegions, activeRegionExtension, maxRegionSize);
incorporateActiveRegions(profile, activeRegions);
// add active regions to queue of regions to process
// first check if can merge active regions over shard boundaries
@ -141,10 +296,10 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
if( !workQueue.isEmpty() ) {
final ActiveRegion last = workQueue.getLast();
final ActiveRegion first = activeRegions.get(0);
if( last.isActive == first.isActive && last.getLocation().contiguousP(first.getLocation()) && last.getLocation().size() + first.getLocation().size() <= maxRegionSize ) {
if( last.isActive == first.isActive && last.getLocation().contiguousP(first.getLocation()) && last.getLocation().size() + first.getLocation().size() <= getMaxRegionSize() ) {
workQueue.removeLast();
activeRegions.remove(first);
workQueue.add( new ActiveRegion(last.getLocation().union(first.getLocation()), first.isActive, this.engine.getGenomeLocParser(), activeRegionExtension) );
workQueue.add( new ActiveRegion(last.getLocation().union(first.getLocation()), first.isActive, this.engine.getGenomeLocParser(), getActiveRegionExtension()) );
}
}
workQueue.addAll( activeRegions );
@ -153,106 +308,109 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
logger.debug("Integrated " + profile.size() + " isActive calls into " + activeRegions.size() + " regions." );
// now go and process all of the active regions
sum = processActiveRegions(walker, sum, minStart, dataProvider.getLocus().getContig());
sum = processActiveRegions(walker, sum, false);
return sum;
}
/**
* Is the loc outside of the intervals being requested for processing by the GATK?
* @param loc
* @return
* Special function called in LinearMicroScheduler to empty out the work queue.
* Ugly for now but will be cleaned up when we push this functionality more into the engine
*/
private boolean outsideEngineIntervals(final GenomeLoc loc) {
return engine.getIntervals() != null && ! engine.getIntervals().overlaps(loc);
public T endTraversal(final Walker<M, T> walker, T sum) {
return processActiveRegions((ActiveRegionWalker<M, T>)walker, sum, true);
}
// -------------------------------------------------------------------------------------
//
// Functions to manage and interact with the live / dead zone
//
// -------------------------------------------------------------------------------------
/**
* Take the individual isActive calls and integrate them into contiguous active regions and
* add these blocks of work to the work queue
* band-pass filter the list of isActive probabilities and turn into active regions
* Update the live region to reflect that the last read we've seen in the traversal is read
*
* @param profile
* @param activeRegions
* @param activeRegionExtension
* @param maxRegionSize
* @return
* Requires that sequential calls always be provided reads in coordinate sorted order
*
* @param read the last read we've seen during the traversal
*/
private ActivityProfile incorporateActiveRegions(final ActivityProfile profile,
final List<ActiveRegion> activeRegions,
final int activeRegionExtension,
final int maxRegionSize) {
if ( profile.isEmpty() )
throw new IllegalStateException("trying to incorporate an empty active profile " + profile);
final ActivityProfile bandPassFiltered = profile.bandPassFilter();
activeRegions.addAll(bandPassFiltered.createActiveRegions( activeRegionExtension, maxRegionSize ));
return new ActivityProfile( engine.getGenomeLocParser(), profile.hasPresetRegions() );
}
// --------------------------------------------------------------------------------
//
// simple utility functions
//
// --------------------------------------------------------------------------------
private final ActivityProfileResult walkerActiveProb(final ActiveRegionWalker<M,T> walker,
final RefMetaDataTracker tracker, final ReferenceContext refContext,
final AlignmentContext locus, final GenomeLoc location) {
if ( walker.hasPresetActiveRegions() ) {
return new ActivityProfileResult(location, walker.presetActiveRegions.overlaps(location) ? 1.0 : 0.0);
} else {
return walker.isActive( tracker, refContext, locus );
protected void rememberLastReadLocation(final GATKSAMRecord read) {
final GenomeLoc currentLocation = engine.getGenomeLocParser().createGenomeLoc(read);
if ( spanOfLastReadSeen == null )
spanOfLastReadSeen = currentLocation;
else {
if ( currentLocation.isBefore(spanOfLastReadSeen) )
throw new IllegalStateException("Updating last read seen in the traversal with read " + read + " with span " + currentLocation + " but this occurs before the previously seen read " + spanOfLastReadSeen);
spanOfLastReadSeen = currentLocation;
}
}
private ReferenceOrderedView getReferenceOrderedView( final ActiveRegionWalker<M,T> walker,
final LocusShardDataProvider dataProvider,
final LocusView locusView) {
if ( WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE_ORDERED_DATA )
return new ManagingReferenceOrderedView( dataProvider );
else
return (RodLocusView)locusView;
/**
* Get a GenomeLoc indicating the start (heading to the right) of the live ART region.
* @return the left-most position of the live region on the genome
*/
protected GenomeLoc spanOfLastSeenRead() {
return spanOfLastReadSeen;
}
// --------------------------------------------------------------------------------
//
// code to handle processing active regions
//
// --------------------------------------------------------------------------------
/**
* Is the active region completely within the traversal's dead zone?
*
* @param region the region we want to test
* @return true if the extended location of region is completely within the current dead zone, false otherwise
*/
protected boolean regionCompletelyWithinDeadZone(final ActiveRegion region) {
return region.getExtendedLoc().getStop() < spanOfLastSeenRead().getStart()
|| ! region.getExtendedLoc().onSameContig(spanOfLastSeenRead());
}
private T processActiveRegions( final ActiveRegionWalker<M,T> walker, T sum, final int minStart, final String currentContig ) {
/**
* Is the read dead? That is, can it no longer be in any future active region, and therefore can be discarded?
*
* read: start |--------> stop ------ stop + extension
* region: start |-----------------| end
*
* Since the regions are coming in order, read could potentially be contained in a future interval if
* stop + activeRegionExtension >= end. If, on the other hand, stop + extension is < the end
* of this region, then we can discard it, since any future region could only include reads
* up to end + 1 - extension.
*
* Note that this function doesn't care about the dead zone. We're assuming that by
* actually calling this function with an active region that region is already in the dead zone,
* so checking that the read is in the dead zone doesn't make sense.
*
* @param read the read we're testing
* @param activeRegion the current active region
* @return true if the read is dead, false otherwise
*/
@Requires({"read != null", "activeRegion != null"})
private boolean readCannotOccurInAnyMoreActiveRegions(final GATKSAMRecord read, final ActiveRegion activeRegion) {
return read.getAlignmentEnd() + getActiveRegionExtension() < activeRegion.getLocation().getStop();
}
// -------------------------------------------------------------------------------------
//
// Functions to process active regions that are ready for map / reduce calls
//
// -------------------------------------------------------------------------------------
private T processActiveRegions(final ActiveRegionWalker<M, T> walker, T sum, final boolean forceRegionsToBeActive) {
if( walker.activeRegionOutStream != null ) {
writeActiveRegionsToStream(walker);
return sum;
} else {
return callWalkerMapOnActiveRegions(walker, sum, minStart, currentContig);
return callWalkerMapOnActiveRegions(walker, sum, forceRegionsToBeActive);
}
}
/**
* Write out each active region to the walker activeRegionOutStream
*
* @param walker
*/
private void writeActiveRegionsToStream( final ActiveRegionWalker<M,T> walker ) {
// Just want to output the active regions to a file, not actually process them
for( final ActiveRegion activeRegion : workQueue ) {
if( activeRegion.isActive ) {
walker.activeRegionOutStream.println( activeRegion.getLocation() );
}
}
}
private T callWalkerMapOnActiveRegions( final ActiveRegionWalker<M,T> walker, T sum, final int minStart, final String currentContig ) {
private T callWalkerMapOnActiveRegions(final ActiveRegionWalker<M, T> walker, T sum, final boolean forceRegionsToBeActive) {
// Since we've traversed sufficiently past this point (or this contig!) in the workQueue we can unload those regions and process them
// TODO can implement parallel traversal here
while( workQueue.peek() != null ) {
final GenomeLoc extendedLoc = workQueue.peek().getExtendedLoc();
if ( extendedLoc.getStop() < minStart || (currentContig != null && !workQueue.peek().getExtendedLoc().getContig().equals(currentContig))) {
final ActiveRegion activeRegion = workQueue.remove();
sum = processActiveRegion( activeRegion, sum, walker );
final ActiveRegion activeRegion = workQueue.peek();
if ( forceRegionsToBeActive || regionCompletelyWithinDeadZone(activeRegion) ) {
if ( DEBUG ) logger.warn("Processing active region " + activeRegion + " dead zone " + spanOfLastSeenRead());
sum = processActiveRegion( workQueue.remove(), sum, walker );
} else {
break;
}
@ -261,61 +419,33 @@ public class TraverseActiveRegions <M,T> extends TraversalEngine<M,T,ActiveRegio
return sum;
}
private T processActiveRegion( final ActiveRegion activeRegion, final T sum, final ActiveRegionWalker<M,T> walker ) {
final ArrayList<GATKSAMRecord> placedReads = new ArrayList<GATKSAMRecord>();
for( final GATKSAMRecord read : myReads ) {
protected T processActiveRegion(final ActiveRegion activeRegion, final T sum, final ActiveRegionWalker<M, T> walker) {
final Iterator<GATKSAMRecord> liveReads = myReads.iterator();
while ( liveReads.hasNext() ) {
boolean killed = false;
final GATKSAMRecord read = liveReads.next();
final GenomeLoc readLoc = this.engine.getGenomeLocParser().createGenomeLoc( read );
if( activeRegion.getLocation().overlapsP( readLoc ) ) {
// The region which the highest amount of overlap is chosen as the primary region for the read (tie breaking is done as right most region)
long maxOverlap = activeRegion.getLocation().sizeOfOverlap( readLoc );
ActiveRegion bestRegion = activeRegion;
for( final ActiveRegion otherRegionToTest : workQueue ) {
if( otherRegionToTest.getLocation().sizeOfOverlap(readLoc) >= maxOverlap ) {
maxOverlap = otherRegionToTest.getLocation().sizeOfOverlap( readLoc );
bestRegion = otherRegionToTest;
}
}
bestRegion.add( read );
// The read is also added to all other regions in which it overlaps but marked as non-primary
if( walker.wantsNonPrimaryReads() ) {
if( !bestRegion.equals(activeRegion) ) {
activeRegion.add( read );
}
for( final ActiveRegion otherRegionToTest : workQueue ) {
if( !bestRegion.equals(otherRegionToTest) ) {
// check for non-primary vs. extended
if ( otherRegionToTest.getLocation().overlapsP( readLoc ) ) {
otherRegionToTest.add( read );
} else if ( walker.wantsExtendedReads() && otherRegionToTest.getExtendedLoc().overlapsP( readLoc ) ) {
otherRegionToTest.add( read );
}
}
}
}
placedReads.add( read );
// check for non-primary vs. extended
} else if( activeRegion.getLocation().overlapsP( readLoc ) ) {
if ( walker.wantsNonPrimaryReads() ) {
activeRegion.add( read );
if( activeRegion.getLocation().overlapsP( readLoc ) ) {
activeRegion.add(read);
if ( ! walker.wantsNonPrimaryReads() ) {
if ( DEBUG ) logger.warn("Removing read " + read.getReadName() + " at " + readLoc + " with dead zone start " + spanOfLastSeenRead());
liveReads.remove();
killed = true;
}
} else if( walker.wantsExtendedReads() && activeRegion.getExtendedLoc().overlapsP( readLoc )) {
activeRegion.add( read );
}
if ( ! killed && readCannotOccurInAnyMoreActiveRegions(read, activeRegion) ) {
if ( DEBUG ) logger.warn("Removing read " + read.getReadName() + " at " + readLoc + " with dead zone start " + spanOfLastSeenRead());
liveReads.remove();
}
}
myReads.removeAll( placedReads ); // remove all the reads which have been placed into their active region
// WARNING: This hashset relies on reads being exactly equal when they are placed in the list as when they are removed. So the ActiveRegionWalker can't modify the reads in any way.
logger.debug(">> Map call with " + activeRegion.getReads().size() + " " + (activeRegion.isActive ? "active" : "inactive") + " reads @ " + activeRegion.getLocation() + " with full extent: " + activeRegion.getReferenceLoc());
final M x = walker.map( activeRegion, null );
final M x = walker.map(activeRegion, null);
return walker.reduce( x, sum );
}
/**
* Special function called in LinearMicroScheduler to empty out the work queue.
* Ugly for now but will be cleaned up when we push this functionality more into the engine
*/
public T endTraversal( final Walker<M,T> walker, T sum) {
return processActiveRegions((ActiveRegionWalker<M,T>)walker, sum, Integer.MAX_VALUE, null);
}
}

View File

@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.coverage;

View File

@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.coverage;

View File

@ -217,9 +217,9 @@ public class CoverageUtils {
private static void updateCounts(int[] counts, PileupElement e) {
if ( e.isDeletion() ) {
counts[BaseUtils.DELETION_INDEX] += e.getRepresentativeCount();
} else if ( BaseUtils.basesAreEqual((byte) 'N', e.getBase()) ) {
counts[BaseUtils.NO_CALL_INDEX] += e.getRepresentativeCount();
counts[BaseUtils.Base.D.ordinal()] += e.getRepresentativeCount();
} else if ( BaseUtils.basesAreEqual(BaseUtils.Base.N.base, e.getBase()) ) {
counts[BaseUtils.Base.N.ordinal()] += e.getRepresentativeCount();
} else {
try {
counts[BaseUtils.simpleBaseToBaseIndex(e.getBase())] += e.getRepresentativeCount();

View File

@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.coverage;
@ -107,7 +86,7 @@ public class GCContentByInterval extends LocusWalker<Long, Long> {
if (tracker == null)
return null;
int baseIndex = ref.getBaseIndex();
return (baseIndex == BaseUtils.gIndex || baseIndex == BaseUtils.cIndex) ? 1L : 0L;
return (baseIndex == BaseUtils.Base.G.ordinal() || baseIndex == BaseUtils.Base.C.ordinal()) ? 1L : 0L;
}
public Long reduce(Long toAdd, Long runningCount) {

View File

@ -1,3 +1,28 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.diagnostics;

View File

@@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
 * 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
 * LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.diagnostics;

View File

@@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
 * 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
 * LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.diagnostics;

View File

@@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
 * 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
 * LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.diagnostics;

View File

@ -1,47 +1,26 @@
/*
* By downloading the PROGRAM you agree to the following terms of use:
*
* BROAD INSTITUTE - SOFTWARE LICENSE AGREEMENT - FOR ACADEMIC NON-COMMERCIAL RESEARCH PURPOSES ONLY
*
* This Agreement is made between the Broad Institute, Inc. with a principal address at 7 Cambridge Center, Cambridge, MA 02142 (BROAD) and the LICENSEE and is effective at the date the downloading is completed (EFFECTIVE DATE).
*
* WHEREAS, LICENSEE desires to license the PROGRAM, as defined hereinafter, and BROAD wishes to have this PROGRAM utilized in the public interest, subject only to the royalty-free, nonexclusive, nontransferable license rights of the United States Government pursuant to 48 CFR 52.227-14; and
* WHEREAS, LICENSEE desires to license the PROGRAM and BROAD desires to grant a license on the following terms and conditions.
* NOW, THEREFORE, in consideration of the promises and covenants made herein, the parties hereto agree as follows:
*
* 1. DEFINITIONS
* 1.1 PROGRAM shall mean copyright in the object code and source code known as GATK2 and related documentation, if any, as they exist on the EFFECTIVE DATE and can be downloaded from http://www.broadinstitute.org/GATK on the EFFECTIVE DATE.
*
* 2. LICENSE
* 2.1 Grant. Subject to the terms of this Agreement, BROAD hereby grants to LICENSEE, solely for academic non-commercial research purposes, a non-exclusive, non-transferable license to: (a) download, execute and display the PROGRAM and (b) create bug fixes and modify the PROGRAM.
* The LICENSEE may apply the PROGRAM in a pipeline to data owned by users other than the LICENSEE and provide these users the results of the PROGRAM provided LICENSEE does so for academic non-commercial purposes only. For clarification purposes, academic sponsored research is not a commercial use under the terms of this Agreement.
* 2.2 No Sublicensing or Additional Rights. LICENSEE shall not sublicense or distribute the PROGRAM, in whole or in part, without prior written permission from BROAD. LICENSEE shall ensure that all of its users agree to the terms of this Agreement. LICENSEE further agrees that it shall not put the PROGRAM on a network, server, or other similar technology that may be accessed by anyone other than the LICENSEE and its employees and users who have agreed to the terms of this agreement.
* 2.3 License Limitations. Nothing in this Agreement shall be construed to confer any rights upon LICENSEE by implication, estoppel, or otherwise to any computer software, trademark, intellectual property, or patent rights of BROAD, or of any other entity, except as expressly granted herein. LICENSEE agrees that the PROGRAM, in whole or part, shall not be used for any commercial purpose, including without limitation, as the basis of a commercial software or hardware product or to provide services. LICENSEE further agrees that the PROGRAM shall not be copied or otherwise adapted in order to circumvent the need for obtaining a license for use of the PROGRAM.
*
* 3. OWNERSHIP OF INTELLECTUAL PROPERTY
* LICENSEE acknowledges that title to the PROGRAM shall remain with BROAD. The PROGRAM is marked with the following BROAD copyright notice and notice of attribution to contributors. LICENSEE shall retain such notice on all copies. LICENSEE agrees to include appropriate attribution if any results obtained from use of the PROGRAM are included in any publication.
* Copyright 2012 Broad Institute, Inc.
* Notice of attribution: The GATK2 program was made available through the generosity of Medical and Population Genetics program at the Broad Institute, Inc.
* LICENSEE shall not use any trademark or trade name of BROAD, or any variation, adaptation, or abbreviation, of such marks or trade names, or any names of officers, faculty, students, employees, or agents of BROAD except as stated above for attribution purposes.
*
* 4. INDEMNIFICATION
* LICENSEE shall indemnify, defend, and hold harmless BROAD, and their respective officers, faculty, students, employees, associated investigators and agents, and their respective successors, heirs and assigns, (Indemnitees), against any liability, damage, loss, or expense (including reasonable attorneys fees and expenses) incurred by or imposed upon any of the Indemnitees in connection with any claims, suits, actions, demands or judgments arising out of any theory of liability (including, without limitation, actions in the form of tort, warranty, or strict liability and regardless of whether such action has any factual basis) pursuant to any right or license granted under this Agreement.
*
* 5. NO REPRESENTATIONS OR WARRANTIES
* THE PROGRAM IS DELIVERED AS IS. BROAD MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE PROGRAM OR THE COPYRIGHT, EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, WHETHER OR NOT DISCOVERABLE. BROAD EXTENDS NO WARRANTIES OF ANY KIND AS TO PROGRAM CONFORMITY WITH WHATEVER USER MANUALS OR OTHER LITERATURE MAY BE ISSUED FROM TIME TO TIME.
* IN NO EVENT SHALL BROAD OR ITS RESPECTIVE DIRECTORS, OFFICERS, EMPLOYEES, AFFILIATED INVESTIGATORS AND AFFILIATES BE LIABLE FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND, INCLUDING, WITHOUT LIMITATION, ECONOMIC DAMAGES OR INJURY TO PROPERTY AND LOST PROFITS, REGARDLESS OF WHETHER BROAD SHALL BE ADVISED, SHALL HAVE OTHER REASON TO KNOW, OR IN FACT SHALL KNOW OF THE POSSIBILITY OF THE FOREGOING.
*
* 6. ASSIGNMENT
* This Agreement is personal to LICENSEE and any rights or obligations assigned by LICENSEE without the prior written consent of BROAD shall be null and void.
*
* 7. MISCELLANEOUS
* 7.1 Export Control. LICENSEE gives assurance that it will comply with all United States export control laws and regulations controlling the export of the PROGRAM, including, without limitation, all Export Administration Regulations of the United States Department of Commerce. Among other things, these laws and regulations prohibit, or require a license for, the export of certain types of software to specified countries.
* 7.2 Termination. LICENSEE shall have the right to terminate this Agreement for any reason upon prior written notice to BROAD. If LICENSEE breaches any provision hereunder, and fails to cure such breach within thirty (30) days, BROAD may terminate this Agreement immediately. Upon termination, LICENSEE shall provide BROAD with written assurance that the original and all copies of the PROGRAM have been destroyed, except that, upon prior written authorization from BROAD, LICENSEE may retain a copy for archive purposes.
* 7.3 Survival. The following provisions shall survive the expiration or termination of this Agreement: Articles 1, 3, 4, 5 and Sections 2.2, 2.3, 7.3, and 7.4.
* 7.4 Notice. Any notices under this Agreement shall be in writing, shall specifically refer to this Agreement, and shall be sent by hand, recognized national overnight courier, confirmed facsimile transmission, confirmed electronic mail, or registered or certified mail, postage prepaid, return receipt requested. All notices under this Agreement shall be deemed effective upon receipt.
* 7.5 Amendment and Waiver; Entire Agreement. This Agreement may be amended, supplemented, or otherwise modified only by means of a written instrument signed by all parties. Any waiver of any rights or failure to act in a specific instance shall relate only to such instance and shall not be construed as an agreement to waive any rights or fail to act in any other instance, whether or not similar. This Agreement constitutes the entire agreement among the parties with respect to its subject matter and supersedes prior agreements or understandings between the parties relating to its subject matter.
* 7.6 Binding Effect; Headings. This Agreement shall be binding upon and inure to the benefit of the parties and their respective permitted successors and assigns. All headings are for convenience only and shall not affect the meaning of any provision of this Agreement.
* 7.7 Governing Law. This Agreement shall be construed, governed, interpreted and applied in accordance with the internal laws of the Commonwealth of Massachusetts, U.S.A., without regard to conflict of laws principles.
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.walkers.fasta;

View File

@ -37,12 +37,10 @@ import org.broadinstitute.variant.variantcontext.VariantContext;
import java.io.Serializable;
import java.util.*;
public class Haplotype {
protected final byte[] bases;
protected final double[] quals;
public class Haplotype extends Allele {
private GenomeLoc genomeLocation = null;
private HashMap<Integer, VariantContext> eventMap = null;
private boolean isRef = false;
private Cigar cigar;
private int alignmentStartHapwrtRef;
public int leftBreakPoint = 0;
@ -50,44 +48,37 @@ public class Haplotype {
private Event artificialEvent = null;
/**
* Create a simple consensus sequence with provided bases and a uniform quality over all bases of qual
* Main constructor
*
* @param bases bases
* @param qual qual
* @param isRef is reference allele?
*/
public Haplotype( final byte[] bases, final int qual ) {
this.bases = bases.clone();
quals = new double[bases.length];
Arrays.fill(quals, (double)qual);
}
public Haplotype( final byte[] bases, final double[] quals ) {
this.bases = bases.clone();
this.quals = quals.clone();
public Haplotype( final byte[] bases, final boolean isRef ) {
super(bases.clone(), isRef);
}
public Haplotype( final byte[] bases ) {
this(bases, 0);
this(bases, false);
}
protected Haplotype( final byte[] bases, final Event artificialEvent ) {
this(bases, 0);
this(bases, false);
this.artificialEvent = artificialEvent;
}
public Haplotype( final byte[] bases, final GenomeLoc loc ) {
this(bases);
this(bases, false);
this.genomeLocation = loc;
}
@Override
public boolean equals( Object h ) {
return h instanceof Haplotype && Arrays.equals(bases, ((Haplotype) h).bases);
return h instanceof Haplotype && Arrays.equals(getBases(), ((Haplotype) h).getBases());
}
@Override
public int hashCode() {
return Arrays.hashCode(bases);
return Arrays.hashCode(getBases());
}
public HashMap<Integer, VariantContext> getEventMap() {
@ -98,32 +89,13 @@ public class Haplotype {
this.eventMap = eventMap;
}
public boolean isReference() {
return isRef;
}
public void setIsReference( boolean isRef ) {
this.isRef = isRef;
}
public double getQualitySum() {
double s = 0;
for (int k=0; k < bases.length; k++) {
s += quals[k];
}
return s;
}
@Override
public String toString() {
return new String(bases);
return getDisplayString();
}
public double[] getQuals() {
return quals.clone();
}
public byte[] getBases() {
return bases.clone();
return super.getBases().clone();
}
public long getStartPosition() {
@ -178,19 +150,23 @@ public class Haplotype {
public Haplotype insertAllele( final Allele refAllele, final Allele altAllele, final int refInsertLocation, final int genomicInsertLocation ) {
// refInsertLocation is in ref haplotype offset coordinates NOT genomic coordinates
final int haplotypeInsertLocation = ReadUtils.getReadCoordinateForReferenceCoordinate(alignmentStartHapwrtRef, cigar, refInsertLocation, ReadUtils.ClippingTail.RIGHT_TAIL, true);
if( haplotypeInsertLocation == -1 || haplotypeInsertLocation + refAllele.length() >= bases.length ) { // desired change falls inside deletion so don't bother creating a new haplotype
if( haplotypeInsertLocation == -1 || haplotypeInsertLocation + refAllele.length() >= getBases().length ) { // desired change falls inside deletion so don't bother creating a new haplotype
return null;
}
byte[] newHaplotypeBases = new byte[]{};
newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(bases, 0, haplotypeInsertLocation)); // bases before the variant
newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(getBases(), 0, haplotypeInsertLocation)); // bases before the variant
newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, altAllele.getBases()); // the alt allele of the variant
newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(bases, haplotypeInsertLocation + refAllele.length(), bases.length)); // bases after the variant
newHaplotypeBases = ArrayUtils.addAll(newHaplotypeBases, ArrayUtils.subarray(getBases(), haplotypeInsertLocation + refAllele.length(), getBases().length)); // bases after the variant
return new Haplotype(newHaplotypeBases, new Event(refAllele, altAllele, genomicInsertLocation));
}
public static class HaplotypeBaseComparator implements Comparator<Haplotype>, Serializable {
@Override
public int compare( final Haplotype hap1, final Haplotype hap2 ) {
return compareHaplotypeBases(hap1, hap2);
}
public static int compareHaplotypeBases(final Haplotype hap1, final Haplotype hap2) {
final byte[] arr1 = hap1.getBases();
final byte[] arr2 = hap2.getBases();
// compares byte arrays using lexical ordering

View File

@ -381,9 +381,9 @@ public class ReadClipper {
* @return a new read without adaptor sequence
*/
private GATKSAMRecord hardClipAdaptorSequence () {
final Integer adaptorBoundary = ReadUtils.getAdaptorBoundary(read);
final int adaptorBoundary = ReadUtils.getAdaptorBoundary(read);
if (adaptorBoundary == null || !ReadUtils.isInsideRead(read, adaptorBoundary))
if (adaptorBoundary == ReadUtils.CANNOT_COMPUTE_ADAPTOR_BOUNDARY || !ReadUtils.isInsideRead(read, adaptorBoundary))
return read;
return read.getReadNegativeStrandFlag() ? hardClipByReferenceCoordinatesLeftTail(adaptorBoundary) : hardClipByReferenceCoordinatesRightTail(adaptorBoundary);

View File

@ -33,6 +33,7 @@ import net.sf.samtools.SAMSequenceRecord;
import net.sf.samtools.util.StringUtil;
import org.apache.log4j.Priority;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.variant.utils.BaseUtils;
import java.io.File;
import java.io.FileNotFoundException;
@ -41,9 +42,10 @@ import java.util.Arrays;
/**
* A caching version of the IndexedFastaSequenceFile that avoids going to disk as often as the raw indexer.
*
* Thread-safe! Uses a thread-local cache
* Thread-safe! Uses a thread-local cache.
*
* Automatically upper-cases the bases coming in, unless they the flag preserveCase is explicitly set
* Automatically upper-cases the bases coming in, unless the flag preserveCase is explicitly set.
* Automatically converts IUPAC bases to Ns, unless the flag preserveIUPAC is explicitly set.
*/
public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
protected static final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(CachingIndexedFastaSequenceFile.class);
@ -64,10 +66,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
private final long cacheMissBackup;
/**
* If true, we will preserve the case of the original base in the genome, not
* If true, we will preserve the case of the original base in the genome
*/
private final boolean preserveCase;
/**
* If true, we will preserve the IUPAC bases in the genome
*/
private final boolean preserveIUPAC;
// information about checking efficiency
long cacheHits = 0;
long cacheMisses = 0;
@ -97,13 +104,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
* @param index the index of the fasta file, used for efficient random access
* @param cacheSize the size in bp of the cache we will use for this reader
* @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
* @param preserveIUPAC If true, we will keep the IUPAC bases in the FASTA, otherwise they are converted to Ns
*/
public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize, final boolean preserveCase) {
public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) {
super(fasta, index);
if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0");
this.cacheSize = cacheSize;
this.cacheMissBackup = Math.max(cacheSize / 1000, 1);
this.preserveCase = preserveCase;
this.preserveIUPAC = preserveIUPAC;
}
/**
@ -116,25 +125,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
* @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0
* @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
*/
public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize, final boolean preserveCase ) throws FileNotFoundException {
public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize, final boolean preserveCase, final boolean preserveIUPAC) throws FileNotFoundException {
super(fasta);
if ( cacheSize < 0 ) throw new IllegalArgumentException("cacheSize must be > 0");
this.cacheSize = cacheSize;
this.cacheMissBackup = Math.max(cacheSize / 1000, 1);
this.preserveCase = preserveCase;
this.preserveIUPAC = preserveIUPAC;
}
// /**
// * Open the given indexed fasta sequence file. Throw an exception if the file cannot be opened.
// *
// * @param fasta The file to open.
// * @param index Pre-built FastaSequenceIndex, for the case in which one does not exist on disk.
// * @throws java.io.FileNotFoundException If the fasta or any of its supporting files cannot be found.
// */
// public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index) {
// this(fasta, index, DEFAULT_CACHE_SIZE);
// }
/**
* Same as general constructor but allows one to override the default cacheSize
*
@ -145,7 +144,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
* @param cacheSize the size in bp of the cache we will use for this reader
*/
public CachingIndexedFastaSequenceFile(final File fasta, final FastaSequenceIndex index, final long cacheSize) {
this(fasta, index, cacheSize, false);
this(fasta, index, cacheSize, false, false);
}
/**
@ -169,7 +168,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
* @param preserveCase If true, we will keep the case of the underlying bases in the FASTA, otherwise everything is converted to upper case
*/
public CachingIndexedFastaSequenceFile(final File fasta, final boolean preserveCase) throws FileNotFoundException {
this(fasta, DEFAULT_CACHE_SIZE, preserveCase);
this(fasta, DEFAULT_CACHE_SIZE, preserveCase, false);
}
/**
@ -182,7 +181,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
* @param cacheSize the size of the cache to use in this CachingIndexedFastaReader, must be >= 0
*/
public CachingIndexedFastaSequenceFile(final File fasta, final long cacheSize ) throws FileNotFoundException {
this(fasta, cacheSize, false);
this(fasta, cacheSize, false, false);
}
/**
@ -240,6 +239,15 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
return ! isPreservingCase();
}
/**
* Is this CachingIndexedFastaReader keeping the IUPAC bases in the fasta, or is it turning them into Ns?
*
* @return true if the IUPAC bases coming from this reader are not modified
*/
public boolean isPreservingIUPAC() {
return preserveIUPAC;
}
/**
* Gets the subsequence of the contig in the range [start,stop]
*
@ -253,7 +261,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
* all of the bases in the ReferenceSequence returned by this method will be upper cased.
*/
@Override
public ReferenceSequence getSubsequenceAt( final String contig, final long start, final long stop ) {
public ReferenceSequence getSubsequenceAt( final String contig, long start, final long stop ) {
final ReferenceSequence result;
final Cache myCache = cache.get();
@ -261,8 +269,9 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
cacheMisses++;
result = super.getSubsequenceAt(contig, start, stop);
if ( ! preserveCase ) StringUtil.toUpperCase(result.getBases());
if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(result.getBases(), true, start < 1);
} else {
// todo -- potential optimization is to check if contig.name == contig, as this in generally will be true
// todo -- potential optimization is to check if contig.name == contig, as this in general will be true
SAMSequenceRecord contigInfo = super.getSequenceDictionary().getSequence(contig);
if (stop > contigInfo.getSequenceLength())
@ -276,6 +285,7 @@ public class CachingIndexedFastaSequenceFile extends IndexedFastaSequenceFile {
// convert all of the bases in the sequence to upper case if we aren't preserving cases
if ( ! preserveCase ) StringUtil.toUpperCase(myCache.seq.getBases());
if ( ! preserveIUPAC ) BaseUtils.convertIUPACtoN(myCache.seq.getBases(), true, myCache.start == 0);
} else {
cacheHits++;
}

View File

@ -32,6 +32,7 @@ import net.sf.picard.util.IntervalList;
import net.sf.samtools.SAMFileHeader;
import org.apache.log4j.Logger;
import org.broad.tribble.Feature;
import org.broadinstitute.sting.commandline.IntervalArgumentCollection;
import org.broadinstitute.sting.commandline.IntervalBinding;
import org.broadinstitute.sting.gatk.datasources.reference.ReferenceDataSource;
import org.broadinstitute.sting.utils.GenomeLoc;
@ -534,6 +535,47 @@ public class IntervalUtils {
}
}
/**
 * Turn the -L / -XL interval arguments into a single sorted set of intervals to process.
 *
 * Returns null when neither include (-L) nor exclude (-XL) arguments were supplied,
 * signalling that no interval restriction applies at all. Otherwise parses both
 * argument lists together and, when exclusions are present, subtracts them from the
 * include set, logging how many loci were removed.
 *
 * @param referenceDataSource the reference used to resolve the interval bindings
 * @param argCollection       the user's interval arguments (-L, -XL, merging/padding rules)
 * @return the intervals to process, or null if no interval arguments were given
 */
public static GenomeLocSortedSet parseIntervalArguments(final ReferenceDataSource referenceDataSource, IntervalArgumentCollection argCollection) {
    // no -L and no -XL arguments at all: signal "no restriction" with null
    if ( argCollection.intervals == null && argCollection.excludeIntervals == null )
        return null;

    // Note that the use of '-L all' is no longer supported.

    // parse the include and exclude bindings in one shot
    final Pair<GenomeLocSortedSet, GenomeLocSortedSet> parsedPair = IntervalUtils.parseIntervalBindingsPair(
            referenceDataSource,
            argCollection.intervals,
            argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding,
            argCollection.excludeIntervals);

    final GenomeLocSortedSet included = parsedPair.getFirst();
    final GenomeLocSortedSet excluded = parsedPair.getSecond();

    final GenomeLocSortedSet result;
    if ( excluded == null ) {
        // no -XL arguments: the include set is the final answer
        result = included;
    } else {
        // -XL given: remove the excluded regions from the include set
        result = included.subtractRegions(excluded);

        // logging messages only printed when exclude (-XL) arguments are given
        final long includedSpan = included.coveredSize();
        final long excludedSpan = excluded.coveredSize();
        final long resultSpan = result.coveredSize();
        logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", includedSpan, excludedSpan));
        logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)",
            includedSpan - resultSpan, (includedSpan - resultSpan) / (0.01 * includedSpan)));
    }

    logger.info(String.format("Processing %d bp from intervals", result.coveredSize()));
    return result;
}
public static Pair<GenomeLocSortedSet, GenomeLocSortedSet> parseIntervalBindingsPair(
final ReferenceDataSource referenceDataSource,
final List<IntervalBinding<Feature>> intervals,

View File

@ -0,0 +1,370 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import com.google.java.contract.Ensures;
import com.google.java.contract.Invariant;
import com.google.java.contract.Requires;
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
/**
* Steps a single read along its alignment to the genome
*
* The logical model for generating extended events is as follows: the "record state"
* implements the traversal along the reference; thus stepForwardOnGenome() returns
* on every and only on actual reference bases. This can be a (mis)match or a deletion
* (in the latter case, we still return on every individual reference base the deletion spans).
*
* User: depristo
* Date: 1/5/13
* Time: 1:08 PM
*/
@Invariant({
"nCigarElements >= 0",
"cigar != null",
"read != null",
"currentCigarElementOffset >= -1",
"currentCigarElementOffset <= nCigarElements"
})
public class AlignmentStateMachine {
/**
* Our read
*/
private final GATKSAMRecord read;
private final Cigar cigar;
private final int nCigarElements;
private int currentCigarElementOffset = -1;
/**
* how far are we offset from the start of the read bases?
*/
private int readOffset;
/**
* how far are we offset from the alignment start on the genome?
*/
private int genomeOffset;
/**
* Our cigar element
*/
private CigarElement currentElement;
/**
* how far are we into our cigarElement?
*/
private int offsetIntoCurrentCigarElement;
/**
 * Create a state machine for stepping the given read along its alignment to the genome.
 *
 * The machine starts in the left-edge state (see initializeAsLeftEdge); a call to
 * stepForwardOnGenome() is required to advance it to the first aligned position.
 *
 * @param read the read to traverse; must have an alignment start and a cigar
 */
@Requires({"read != null", "read.getAlignmentStart() != -1", "read.getCigar() != null"})
public AlignmentStateMachine(final GATKSAMRecord read) {
    this.read = read;
    this.cigar = read.getCigar();
    // cache the element count; it is referenced by the class invariant and offset accessors
    this.nCigarElements = cigar.numCigarElements();
    initializeAsLeftEdge();
}
/**
 * Reset the machine to the synthetic "left edge" state: all offsets at -1 and no
 * current cigar element. From this state a single call to stepForwardOnGenome()
 * lands the machine on the first real alignment position.
 */
@Ensures("isLeftEdge()")
private void initializeAsLeftEdge() {
    readOffset = -1;
    genomeOffset = -1;
    offsetIntoCurrentCigarElement = -1;
    currentElement = null;
}
/**
* Get the read we are aligning to the genome
* @return a non-null GATKSAMRecord
*/
@Ensures("result != null")
public GATKSAMRecord getRead() {
    return read; // set once in the constructor (which requires it non-null), never reassigned
}
/**
 * The reference (contig) index of the read being traversed.
 *
 * @return the reference index reported by the underlying read
 */
@Ensures("result == getRead().getReferenceIndex()")
public int getReferenceIndex() {
    return read.getReferenceIndex();
}
/**
* Is this the left edge state? I.e., one that is before or after the current read?
* @return true if this state is an edge state, false otherwise
*/
public boolean isLeftEdge() {
    // the left edge is marked by the -1 sentinel set in initializeAsLeftEdge()
    return readOffset == -1;
}
/**
* Are we on the right edge? I.e., is the current state off the right of the alignment?
* @return true if off the right edge, false if otherwise
*/
public boolean isRightEdge() {
    // once the machine advances past the last base, readOffset equals the read length
    return readOffset == read.getReadLength();
}
/**
* What is our current offset in the read's bases that aligns us with the reference genome?
*
* @return the current read offset position. If an edge will be == -1
*/
@Ensures("result >= -1")
public int getReadOffset() {
    return readOffset; // -1 while on the left edge
}
/**
* What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
*
* @return the current offset from the alignment start on the genome. If this state is
* at the left edge the result will be -1;
*/
@Ensures("result >= -1")
public int getGenomeOffset() {
    return genomeOffset; // -1 while on the left edge
}
/**
* Get the position (1-based as standard) of the current alignment on the genome w.r.t. the read's alignment start
* @return the position on the genome of the current state in absolute coordinates
*/
@Ensures("result > 0")
public int getGenomePosition() {
    // SAM alignment start is 1-based; adding the offset from the alignment start
    // yields the current 1-based genomic position
    return read.getAlignmentStart() + getGenomeOffset();
}
/**
* Gets #getGenomePosition but as a 1 bp GenomeLoc
* @param genomeLocParser the parser to use to create the genome loc
* @return a non-null genome location with start position of getGenomePosition
*/
@Requires("genomeLocParser != null")
@Ensures("result != null")
public GenomeLoc getLocation(final GenomeLocParser genomeLocParser) {
    // builds a 1 bp location at the current genome position on the read's contig
    // TODO -- may return wonky results if on an edge (could be 0 or could be beyond genome location)
    return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
}
/**
* Get the cigar element we're currently aligning with.
*
* For example, if the cigar string is 2M2D2M and we're in the second step of the
* first 2M, then this function returns the element 2M. After calling stepForwardOnGenome
* this function would return 2D.
*
* @return the cigar element, or null if we're the left edge
*/
@Ensures("result != null || isLeftEdge() || isRightEdge()")
public CigarElement getCurrentCigarElement() {
    return currentElement; // null only in the edge states, per the contract above
}
/**
* Get the offset of the current cigar element among all cigar elements in the read
*
* Suppose our read's cigar is 1M2D3M, and we're at the first 1M. This would
* return 0. Stepping forward puts us in the 2D, so our offset is 1. Another
* step forward would result in a 1 again (we're in the second position of the 2D).
* Finally, one more step forward brings us to 2 (for the 3M element)
*
* @return the offset of the current cigar element in the reads's cigar. Will return -1 for
* when the state is on the left edge, and be == the number of cigar elements in the
* read when we're past the last position on the genome
*/
@Ensures({"result >= -1", "result <= nCigarElements"})
public int getCurrentCigarElementOffset() {
    return currentCigarElementOffset; // -1 on the left edge, nCigarElements past the last genome position
}
/**
* Get the offset of the current state into the current cigar element
*
* That is, suppose we have a read with cigar 2M3D4M, and we're right at
* the second M position. offsetIntoCurrentCigarElement would be 1, as
* it's two elements into the 2M cigar. Now stepping forward we'd be
* in cigar element 3D, and our offsetIntoCurrentCigarElement would be 0.
*
* @return the offset (from 0) of the current state in the current cigar element.
* Will be 0 on the right edge, and -1 on the left.
*/
@Ensures({"result >= 0 || (result == -1 && isLeftEdge())", "!isRightEdge() || result == 0"})
public int getOffsetIntoCurrentCigarElement() {
    return offsetIntoCurrentCigarElement; // -1 on the left edge, 0 on the right edge (per the contract above)
}
/**
 * Convenience accessor for the CigarOperator of the current cigar element.
 *
 * Safe to call while on an edge, where currentElement is null: in that
 * case this accessor simply returns null instead of throwing.
 *
 * @return the operator of the current cigar element, or null if this is an edge state
 */
@Ensures("result != null || isLeftEdge() || isRightEdge()")
public CigarOperator getCigarOperator() {
    if ( currentElement == null )
        return null;
    return currentElement.getOperator();
}
@Override
public String toString() {
    // ro = read offset, go = genome offset, cec = offset into the current cigar element,
    // followed by the current cigar element itself (may print "null" on an edge)
    return String.format("%s ro=%d go=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, offsetIntoCurrentCigarElement, currentElement);
}
// -----------------------------------------------------------------------------------------------
//
// Code for setting up prev / next states
//
// -----------------------------------------------------------------------------------------------
/**
 * Step the state machine forward one unit
 *
 * Takes the current state of this machine, and advances the state until the next on-genome
 * cigar element (M, X, =, D) is encountered, at which point this function returns with the
 * cigar operator of the current element.
 *
 * Assumes that the AlignmentStateMachine is in the left edge state at the start, so that
 * stepForwardOnGenome() can be called to move the machine to the first alignment position. That
 * is, the normal use of this code is:
 *
 * AlignmentStateMachine machine = new AlignmentStateMachine(read)
 * machine.stepForwardOnGenome()
 * // now the machine is at the first position on the genome
 *
 * When stepForwardOnGenome() advances off the right edge of the read, the state machine is
 * left in a state such that isRightEdge() returns true and returns null, indicating
 * the machine cannot advance further. The machine may explode, though this is not contracted,
 * if stepForwardOnGenome() is called after a previous call returned null.
 *
 * @return the operator of the cigar element that machine stopped at, null if we advanced off the end of the read
 * @throws UserException.MalformedBAM if the read starts or ends with a deletion operator
 */
@Ensures("result != null || isRightEdge()")
public CigarOperator stepForwardOnGenome() {
    // loop until we either find a cigar element step that moves us one base on the genome, or we run
    // out of cigar elements
    while ( true ) {
        // we enter this method with readOffset = index of the last processed base on the read
        // (-1 if we did not process a single base yet); this can be last matching base,
        // or last base of an insertion

        // have we exhausted the current element (or not yet entered the first one)?
        if (currentElement == null || (offsetIntoCurrentCigarElement + 1) >= currentElement.getLength()) {
            currentCigarElementOffset++;
            if (currentCigarElementOffset < nCigarElements) {
                // advance to the next cigar element, resetting the within-element offset
                currentElement = cigar.getCigarElement(currentCigarElementOffset);
                offsetIntoCurrentCigarElement = -1;
                // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
                // we reenter in order to re-check offsetIntoCurrentCigarElement against currentElement's length
                continue;
            } else {
                // no more cigar elements: we've walked off the right edge of the read
                if (currentElement != null && currentElement.getOperator() == CigarOperator.D)
                    throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");

                // we're done, so set the offset of the cigar to 0 for cleanliness, as well as the current element
                offsetIntoCurrentCigarElement = 0;
                readOffset = read.getReadLength();
                currentElement = null;

                // Reads that contain indels model the genomeOffset as the following base in the reference. Because
                // we fall into this else block only when indels end the read, increment genomeOffset such that the
                // current offset of this read is the next ref base after the end of the indel. This position will
                // model a point on the reference somewhere after the end of the read.
                genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:
                // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
                return null;
            }
        }

        offsetIntoCurrentCigarElement++;
        boolean done = false;
        switch (currentElement.getOperator()) {
            case H: // ignore hard clips
            case P: // ignore pads
                // consume the whole element at once: it occupies neither read nor reference bases
                offsetIntoCurrentCigarElement = currentElement.getLength();
                break;
            case I: // insertion w.r.t. the reference
            case S: // soft clip
                // consumes read bases but no reference bases, so skip the whole element in one step
                offsetIntoCurrentCigarElement = currentElement.getLength();
                readOffset += currentElement.getLength();
                break;
            case D: // deletion w.r.t. the reference
                if (readOffset < 0) // we don't want reads starting with deletion, this is a malformed cigar string
                    throw new UserException.MalformedBAM(read, "read starts with deletion. Cigar: " + read.getCigarString() + ". Although the SAM spec technically permits such reads, this is often indicative of malformed files. If you are sure you want to use this file, re-run your analysis with the extra option: -rf BadCigar");
                // should be the same as N case
                genomeOffset++;
                done = true;
                break;
            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
                genomeOffset++;
                done = true;
                break;
            case M:
            case EQ:
            case X:
                // the only operators that consume both a read base and a reference base
                readOffset++;
                genomeOffset++;
                done = true;
                break;
            default:
                throw new IllegalStateException("Case statement didn't deal with cigar op: " + currentElement.getOperator());
        }

        if ( done )
            return currentElement.getOperator();
    }
}
/**
 * Build a PileupElement that reflects the current state of this machine.
 *
 * It is illegal to call this while the machine sits on either edge of the
 * read (before the first or past the last on-genome position).
 *
 * @return a freshly constructed pileup element for the current position
 * @throws IllegalStateException if called from an edge state
 */
@Ensures("result != null")
public final PileupElement makePileupElement() {
    final boolean onAnEdge = isLeftEdge() || isRightEdge();
    if ( onAnEdge )
        throw new IllegalStateException("Cannot make a pileup element from an edge alignment state");
    return new PileupElement(read, getReadOffset(), getCurrentCigarElement(),
                             getCurrentCigarElementOffset(), getOffsetIntoCurrentCigarElement());
}
}

View File

@ -0,0 +1,53 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
/**
 * Immutable holder for the two pieces of downsampling configuration LIBS needs:
 * whether per-sample downsampling should happen at all, and if so the target coverage.
 *
 * User: depristo
 * Date: 1/5/13
 * Time: 1:26 PM
 */
class LIBSDownsamplingInfo {
    /** Shared constant meaning "do not downsample"; the coverage carries a -1 sentinel. */
    public final static LIBSDownsamplingInfo NO_DOWNSAMPLING = new LIBSDownsamplingInfo(false, -1);

    private final boolean performDownsampling;
    private final int toCoverage;

    public LIBSDownsamplingInfo(boolean performDownsampling, int toCoverage) {
        this.performDownsampling = performDownsampling;
        this.toCoverage = toCoverage;
    }

    /** Should LIBS downsample the read stream at all? */
    public boolean isPerformDownsampling() {
        return performDownsampling;
    }

    /** Target per-sample coverage; only meaningful when isPerformDownsampling() is true. */
    public int getToCoverage() {
        return toCoverage;
    }
}

View File

@ -0,0 +1,198 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import net.sf.picard.reference.IndexedFastaSequenceFile;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMReadGroupRecord;
import net.sf.samtools.SAMRecordIterator;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.commandline.CommandLineProgram;
import org.broadinstitute.sting.commandline.Input;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.iterators.GATKSAMIterator;
import org.broadinstitute.sting.utils.*;
import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.GATKSamRecordFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
/**
 * Caliper microbenchmark of fragment pileup
 *
 * Stand-alone command line tool that drives LocusIteratorByState over a BAM file
 * (optionally restricted to one interval, optionally with downsampling enabled)
 * and reports the total runtime, so LIBS performance can be measured in isolation.
 */
public class LIBSPerformance extends CommandLineProgram {
    private static Logger logger = Logger.getLogger(LIBSPerformance.class);

    @Input(fullName = "input_file", shortName = "I", doc = "SAM or BAM file(s)", required = true)
    public File samFile = null;

    @Input(fullName = "reference_sequence", shortName = "R", doc = "Reference sequence file", required = true)
    public File referenceFile = null;

    @Argument(fullName = "L", shortName = "L", doc = "Query location", required = false)
    public String location = null;

    @Argument(fullName = "dt", shortName = "dt", doc = "Enable downsampling", required = false)
    public boolean downsample = false;

    /**
     * Run the benchmark: stream every pileup position out of LIBS, logging progress
     * roughly every 10 seconds, and print the total elapsed time at the end.
     *
     * @return 0 on success (exit code convention of CommandLineProgram)
     * @throws IOException if the reference file cannot be opened
     */
    @Override
    public int execute() throws IOException {
        final IndexedFastaSequenceFile reference = new CachingIndexedFastaSequenceFile(referenceFile);
        final GenomeLocParser genomeLocParser = new GenomeLocParser(reference);

        final SAMFileReader reader = new SAMFileReader(samFile);
        // ensure records come back as GATKSAMRecords, which LIBS requires
        reader.setSAMRecordFactory(new GATKSamRecordFactory());

        // either stream the whole file or query just the user-supplied interval
        SAMRecordIterator rawIterator;
        if ( location == null )
            rawIterator = reader.iterator();
        else {
            final GenomeLoc loc = genomeLocParser.parseGenomeLoc(location);
            rawIterator = reader.query(loc.getContig(), loc.getStart(), loc.getStop(), false);
        }
        final GATKSAMIterator iterator = new GATKSAMIterator(rawIterator);

        // LIBS needs the full sample list up front; collect it from the read groups
        final Set<String> samples = new HashSet<String>();
        for ( final SAMReadGroupRecord rg : reader.getFileHeader().getReadGroups() )
            samples.add(rg.getSample());

        // 250 matches the GATK default per-sample target coverage when downsampling is on
        final LIBSDownsamplingInfo ds = new LIBSDownsamplingInfo(downsample, 250);

        final LocusIteratorByState libs =
                new LocusIteratorByState(
                        iterator,
                        ds,
                        true,
                        genomeLocParser,
                        samples,
                        false);

        final SimpleTimer timer = new SimpleTimer().start();
        int bp = 0;
        double lastElapsed = 0;
        // the benchmark loop proper: pull every AlignmentContext off the iterator
        while ( libs.hasNext() ) {
            AlignmentContext context = libs.next();
            bp++;
            if ( timer.getElapsedTime() - lastElapsed > 10 ) {
                logger.info(bp + " iterations at " + context.getLocation());
                lastElapsed = timer.getElapsedTime();
            }
        }
        logger.info(String.format("runtime in seconds: %.2f", timer.getElapsedTime()));

        return 0;
    }

//    private void syntheticTests() {
//        final int readLength = 101;
//        final int nReads = 10000;
//        final int locus = 1;
//
//        SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
//        final GenomeLocParser genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
//
//        int nIterations = 0;
//        for ( final String cigar : Arrays.asList("101M", "50M10I40M", "50M10D40M") ) {
//            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
//            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
//            final byte[] quals = new byte[readLength];
//            for ( int i = 0; i < readLength; i++ )
//                quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE);
//            read.setBaseQualities(quals);
//            read.setCigarString(cigar);
//
//            for ( int j = 0; j < nReads; j++ ) {
//                for ( int i = 0; i < rep; i++ ) {
//                    switch ( op ) {
//                        case NEW_STATE:
//                        {
//                            final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read);
//                            while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
//                                nIterations++;
//                            }
//                        }
//                        break;
////                        case OLD_STATE:
////                        {
////                            final SAMRecordAlignmentState alignmentStateMachine = new SAMRecordAlignmentState(read);
////                            while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
////                                alignmentStateMachine.getRead();
////                                nIterations++;
////                            }
////                        }
////                        break;
//                        case NEW_LIBS:
//                        {
//                            final List<GATKSAMRecord> reads = Collections.nCopies(30, read);
//                            final org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState libs =
//                                    new org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState(
//                                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
//                                            LocusIteratorByStateBaseTest.createTestReadProperties(),
//                                            genomeLocParser,
//                                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());
//
//                            while ( libs.hasNext() ) {
//                                AlignmentContext context = libs.next();
//                            }
//                        }
//                    }
//                }
//            }
//        }
//
//        System.out.printf("iterations %d%n", nIterations);
//    }

    /**
     * Required main method implementation.
     * @param argv Command-line argument text.
     * @throws Exception on error.
     */
    public static void main(String[] argv) throws Exception {
        int returnCode = 0;
        try {
            LIBSPerformance instance = new LIBSPerformance();
            start(instance, argv);
            returnCode = 0;
        } catch(Exception ex) {
            returnCode = 1;
            ex.printStackTrace();
            throw ex;
        } finally {
            // NOTE(review): System.exit in finally terminates the JVM before the
            // rethrown exception can propagate; exit code is the effective signal here
            System.exit(returnCode);
        }
    }
}

View File

@ -0,0 +1,36 @@
package org.broadinstitute.sting.utils.locusiterator;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import java.util.Iterator;
/**
 * Base class for iterators that traverse a SAM file and accumulate information on a
 * per-locus basis, emitting one AlignmentContext per covered position.
 *
 * Implements Iterable over itself so it can be used directly in for-each loops.
 */
public abstract class LocusIterator implements Iterable<AlignmentContext>, CloseableIterator<AlignmentContext> {
    /** Is at least one more AlignmentContext available from this iterator? */
    public abstract boolean hasNext();

    /** Advance to and return the next AlignmentContext. */
    public abstract AlignmentContext next();

    /** This iterator is its own Iterable. */
    public Iterator<AlignmentContext> iterator() {
        return this;
    }

    /** No-op here; subclasses own whatever underlying resources need closing. */
    public void close() {
        //this.it.close();
    }

    // TODO -- remove me when ART testing is done
    public LocusIteratorByState getLIBS() {
        return null;
    }

    /** Removal makes no sense for a locus traversal and always fails. */
    public void remove() {
        throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
    }
}

View File

@ -0,0 +1,393 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import net.sf.samtools.CigarOperator;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.downsampling.DownsampleType;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.pileup.*;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.ReadUtils;
import java.util.*;
/**
 * Iterator that traverses a SAM File, accumulating information on a per-locus basis
 *
 * Produces AlignmentContext objects, that contain ReadBackedPileups of PileupElements. This
 * class has its core job of converting an iterator of ordered SAMRecords into those
 * RBPs.
 *
 * There are a few constraints required and ensured by LIBS:
 *
 * -- Requires the Iterator<GATKSAMRecord> to return reads in coordinate sorted order, consistent with the ordering
 * defined by the SAM file format. Note that for performance reasons this constraint isn't actually enforced.
 * The behavior of LIBS is undefined in the case where the reads are badly ordered.
 * -- The reads in the ReadBackedPileup are themselves in the order of appearance of the reads from the iterator.
 * That is, the pileup is ordered in a way consistent with the SAM coordinate ordering
 * -- Only aligned reads with at least one on-genomic cigar operator are passed on in the pileups. That is,
 * unmapped reads or reads that are all insertions (10I) or soft clipped (10S) are not passed on.
 * -- LIBS can perform per-sample downsampling of a variety of kinds.
 * -- Because of downsampling there's no guarantee that:
 *   -- A read that could be aligned to a position will actually occur in the pileup (downsampled away)
 *   -- A read that appears in a previous pileup that could align to a future position will actually occur
 *      in that pileup. That is, a read might show up at position i but be downsampled away in the pileup at j
 * -- LIBS can optionally capture all of the reads that come off the iterator, before any leveling downsampling
 * occurs, if requested. This allows users of LIBS to see both a ReadBackedPileup view of the data as well as
 * a stream of unique, sorted reads
 */
public final class LocusIteratorByState extends LocusIterator {
    /**
     * our log, which we want to capture anything from this class
     */
    private final static Logger logger = Logger.getLogger(LocusIteratorByState.class);

    // -----------------------------------------------------------------------------------------------------------------
    //
    // member fields
    //
    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Used to create new GenomeLocs as needed
     */
    private final GenomeLocParser genomeLocParser;

    /**
     * A complete list of all samples that may come out of the reads. Must be
     * comprehensive.
     */
    private final ArrayList<String> samples;

    /**
     * The system that maps incoming reads from the iterator to their pileup states
     */
    private final ReadStateManager readStates;

    /**
     * Should we include reads in the pileup which are aligned with a deletion operator to the reference?
     */
    private final boolean includeReadsWithDeletionAtLoci;

    /**
     * The next alignment context. A non-null value means that a
     * context is waiting from hasNext() for sending off to the next next() call. A null
     * value means that either hasNext() has not been called at all or that
     * the underlying iterator is exhausted
     */
    private AlignmentContext nextAlignmentContext;

    // -----------------------------------------------------------------------------------------------------------------
    //
    // constructors and other basic operations
    //
    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Create a new LocusIteratorByState
     *
     * @param samIterator the iterator of reads to process into pileups. Reads must be ordered
     *                    according to standard coordinate-sorted BAM conventions
     * @param readInformation meta-information about how to process the reads (i.e., should we do downsampling?)
     * @param genomeLocParser used to create genome locs
     * @param samples a complete list of samples present in the read groups for the reads coming from samIterator.
     *                This is generally just the set of read group sample fields in the SAMFileHeader. This
     *                list of samples may contain a null element, and all reads without read groups will
     *                be mapped to this null sample
     */
    public LocusIteratorByState(final Iterator<GATKSAMRecord> samIterator,
                                final ReadProperties readInformation,
                                final GenomeLocParser genomeLocParser,
                                final Collection<String> samples) {
        // delegate to the fully-specified constructor, translating engine-level
        // ReadProperties into the LIBS-specific downsampling holder
        this(samIterator,
                toDownsamplingInfo(readInformation),
                readInformation.includeReadsWithDeletionAtLoci(),
                genomeLocParser,
                samples,
                readInformation.keepUniqueReadListInLIBS());
    }

    /**
     * Create a new LocusIteratorByState
     *
     * @param samIterator the iterator of reads to process into pileups. Reads must be ordered
     *                    according to standard coordinate-sorted BAM conventions
     * @param downsamplingInfo meta-information about how to downsample the reads
     * @param includeReadsWithDeletionAtLoci should reads whose current cigar operator is a deletion appear in pileups?
     * @param genomeLocParser used to create genome locs
     * @param samples a complete list of samples present in the read groups for the reads coming from samIterator.
     *                This is generally just the set of read group sample fields in the SAMFileHeader. This
     *                list of samples may contain a null element, and all reads without read groups will
     *                be mapped to this null sample
     * @param maintainUniqueReadsList if true, we will keep the unique reads from off the samIterator and make them
     *                                available via the transferReadsFromAllPreviousPileups interface
     */
    protected LocusIteratorByState(final Iterator<GATKSAMRecord> samIterator,
                                   final LIBSDownsamplingInfo downsamplingInfo,
                                   final boolean includeReadsWithDeletionAtLoci,
                                   final GenomeLocParser genomeLocParser,
                                   final Collection<String> samples,
                                   final boolean maintainUniqueReadsList) {
        if ( samIterator == null ) throw new IllegalArgumentException("samIterator cannot be null");
        if ( downsamplingInfo == null ) throw new IllegalArgumentException("downsamplingInfo cannot be null");
        if ( genomeLocParser == null ) throw new IllegalArgumentException("genomeLocParser cannot be null");
        if ( samples == null ) throw new IllegalArgumentException("Samples cannot be null");

        // currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
        // there's no read data. So we need to throw this error only when samIterator.hasNext() is true
        if (samples.isEmpty() && samIterator.hasNext()) {
            throw new IllegalArgumentException("samples list must not be empty");
        }

        this.genomeLocParser = genomeLocParser;
        this.includeReadsWithDeletionAtLoci = includeReadsWithDeletionAtLoci;
        this.samples = new ArrayList<String>(samples);
        this.readStates = new ReadStateManager(samIterator, this.samples, downsamplingInfo, maintainUniqueReadsList);
    }

    @Override
    public Iterator<AlignmentContext> iterator() {
        return this;
    }

    /**
     * Get the current location (i.e., the bp of the center of the pileup) of the pileup, or null if not anywhere yet
     *
     * Assumes that read states is updated to reflect the current pileup position, but not advanced to the
     * next location.
     *
     * @return the location of the current pileup, or null if we're after all reads
     */
    private GenomeLoc getLocation() {
        return readStates.isEmpty() ? null : readStates.getFirst().getLocation(genomeLocParser);
    }

    // -----------------------------------------------------------------------------------------------------------------
    //
    // next() routine and associated collection operations
    //
    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Is there another pileup available?
     * @return true if a subsequent call to next() will succeed, false otherwise
     */
    @Override
    public boolean hasNext() {
        lazyLoadNextAlignmentContext();
        return nextAlignmentContext != null;
    }

    /**
     * Get the next AlignmentContext available from the reads.
     *
     * @return a non-null AlignmentContext of the pileup at the next genomic position covered by
     * at least one read.
     */
    @Override
    public AlignmentContext next() {
        lazyLoadNextAlignmentContext();
        if (!hasNext())
            throw new NoSuchElementException("LocusIteratorByState: out of elements.");
        // hand off the cached context and clear the cache so the next call advances
        AlignmentContext currentAlignmentContext = nextAlignmentContext;
        nextAlignmentContext = null;
        return currentAlignmentContext;
    }

    /**
     * Creates the next alignment context from the given state. Note that this is implemented as a
     * lazy load method. nextAlignmentContext MUST BE null in order for this method to advance to the
     * next entry.
     */
    private void lazyLoadNextAlignmentContext() {
        // keep advancing positions until we build a non-empty pileup or run out of reads;
        // positions where every read is in a D/N state may produce no pileup at all
        while (nextAlignmentContext == null && readStates.hasNext()) {
            readStates.collectPendingReads();

            final GenomeLoc location = getLocation();
            final Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();

            // build one per-sample pileup from each sample's read states
            for (final Map.Entry<String, PerSampleReadStateManager> sampleStatePair : readStates ) {
                final String sample = sampleStatePair.getKey();
                final PerSampleReadStateManager readState = sampleStatePair.getValue();
                final Iterator<AlignmentStateMachine> iterator = readState.iterator();
                final List<PileupElement> pile = new ArrayList<PileupElement>(readState.size());

                while (iterator.hasNext()) {
                    // state object with the read/offset information
                    final AlignmentStateMachine state = iterator.next();
                    final GATKSAMRecord read = state.getRead();
                    final CigarOperator op = state.getCigarOperator();

                    if (op == CigarOperator.N) // N's are never added to any pileup
                        continue;

                    if (!dontIncludeReadInPileup(read, location.getStart())) {
                        if ( ! includeReadsWithDeletionAtLoci && op == CigarOperator.D ) {
                            continue;
                        }

                        pile.add(state.makePileupElement());
                    }
                }

                if (! pile.isEmpty() ) // if this pileup added at least one base, add it to the full pileup
                    fullPileup.put(sample, new ReadBackedPileupImpl(location, pile));
            }

            readStates.updateReadStates(); // critical - must be called after we get the current state offsets and location

            if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done
                nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), false);
        }
    }

    // -----------------------------------------------------------------------------------------------------------------
    //
    // getting the list of reads
    //
    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Transfer current list of all unique reads that have ever been used in any pileup, clearing old list
     *
     * This list is guaranteed to only contain unique reads, even across calls to the this function. It is
     * literally the unique set of reads ever seen.
     *
     * The list occurs in the same order as they are encountered in the underlying iterator.
     *
     * Takes the maintained list of submitted reads, and transfers it to the caller of this
     * function. The old list is set to a new, cleanly allocated list so the caller officially
     * owns the list returned by this call. This is the only way to clear the tracking
     * of submitted reads, if enabled.
     *
     * The purpose of this function is to allow users of LIBS to keep track of all of the reads pulled off the
     * underlying GATKSAMRecord iterator and that appeared at any point in the list of SAMRecordAlignmentState for
     * any reads. This function is intended to allow users to efficiently reconstruct the unique set of reads
     * used across all pileups. This is necessary for LIBS to handle because attempting to do
     * so from the pileups coming out of LIBS is extremely expensive.
     *
     * This functionality is only available if LIBS was created with the argument to track the reads
     *
     * @throws UnsupportedOperationException if called when keepingSubmittedReads is false
     *
     * @return the current list
     */
    @Ensures("result != null")
    public List<GATKSAMRecord> transferReadsFromAllPreviousPileups() {
        return readStates.transferSubmittedReads();
    }

    /**
     * Get the underlying list of tracked reads. For testing only
     * @return a non-null list
     */
    @Ensures("result != null")
    protected List<GATKSAMRecord> getReadsFromAllPreviousPileups() {
        return readStates.getSubmittedReads();
    }

    // -----------------------------------------------------------------------------------------------------------------
    //
    // utility functions
    //
    // -----------------------------------------------------------------------------------------------------------------

    /**
     * Should this read be excluded from the pileup?
     *
     * Generic place to put per-base filters appropriate to LocusIteratorByState
     *
     * @param rec the read to potentially exclude
     * @param pos the genomic position of the current alignment
     * @return true if the read should be excluded from the pileup, false otherwise
     */
    @Requires({"rec != null", "pos > 0"})
    private boolean dontIncludeReadInPileup(GATKSAMRecord rec, long pos) {
        return ReadUtils.isBaseInsideAdaptor(rec, pos);
    }

    /**
     * Create a LIBSDownsamplingInfo object from the requested info in ReadProperties
     *
     * LIBS will invoke the Reservoir and Leveling downsamplers on the read stream if we're
     * downsampling to coverage by sample. SAMDataSource will have refrained from applying
     * any downsamplers to the read stream in this case, in the expectation that LIBS will
     * manage the downsampling. The reason for this is twofold: performance (don't have to
     * split/re-assemble the read stream in SAMDataSource), and to enable partial downsampling
     * of reads (eg., using half of a read, and throwing the rest away).
     *
     * @param readInfo GATK engine information about what should be done to the reads
     * @return a LIBS specific info holder about downsampling only
     */
    @Requires("readInfo != null")
    @Ensures("result != null")
    private static LIBSDownsamplingInfo toDownsamplingInfo(final ReadProperties readInfo) {
        // only BY_SAMPLE downsampling with an explicit target coverage is handled by LIBS itself
        final boolean performDownsampling = readInfo.getDownsamplingMethod() != null &&
                readInfo.getDownsamplingMethod().type == DownsampleType.BY_SAMPLE &&
                readInfo.getDownsamplingMethod().toCoverage != null;
        final int coverage = performDownsampling ? readInfo.getDownsamplingMethod().toCoverage : 0;
        return new LIBSDownsamplingInfo(performDownsampling, coverage);
    }

    /**
     * Create a pileup element for read at offset
     *
     * offset must correspond to a valid read offset given the read's cigar, or an IllegalStateException will be thrown
     *
     * @param read a read
     * @param offset the offset into the bases we'd like to use in the pileup
     * @return a valid PileupElement with read and at offset
     */
    @Ensures("result != null")
    public static PileupElement createPileupForReadAndOffset(final GATKSAMRecord read, final int offset) {
        if ( read == null ) throw new IllegalArgumentException("read cannot be null");
        if ( offset < 0 || offset >= read.getReadLength() ) throw new IllegalArgumentException("Invalid offset " + offset + " outside of bounds 0 and " + read.getReadLength());

        // walk the alignment state machine until it lands on the requested read offset
        final AlignmentStateMachine stateMachine = new AlignmentStateMachine(read);

        while ( stateMachine.stepForwardOnGenome() != null ) {
            if ( stateMachine.getReadOffset() == offset )
                return stateMachine.makePileupElement();
        }

        throw new IllegalStateException("Tried to create a pileup for read " + read + " with offset " + offset +
                " but we never saw such an offset in the alignment state machine");
    }

    /**
     * For testing only. Assumes that the incoming SAMRecords have no read groups, so creates a dummy sample list
     * for the system.
     */
    public static List<String> sampleListForSAMWithoutReadGroups() {
        List<String> samples = new ArrayList<String>();
        samples.add(null); // null is the catch-all sample for reads without read groups
        return samples;
    }
}

View File

@ -0,0 +1,261 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import com.google.java.contract.Ensures;
import com.google.java.contract.Invariant;
import com.google.java.contract.Requires;
import net.sf.samtools.CigarOperator;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.downsampling.Downsampler;
import org.broadinstitute.sting.gatk.downsampling.LevelingDownsampler;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
 * ReadStateManager for a single sample
 *
 * Maintains the alignment state machines (one per live read) for one sample, ordered by
 * the alignment start of their underlying reads, and applies per-locus leveling
 * downsampling to the set of states when downsampling is enabled.
 *
 * User: depristo
 * Date: 1/13/13
 * Time: 12:28 PM
 */
@Invariant({
        "readStartsAreWellOrdered()",
        "! isDownsampling() || downsamplingTarget > 0",
        "nSites >= 0",
        "nSitesNeedingDownsampling >= 0",
        "nSitesNeedingDownsampling <= nSites"
})
final class PerSampleReadStateManager implements Iterable<AlignmentStateMachine> {
    // NOTE: previously created with ReadStateManager.class, which mislabeled this class's log output
    private final static Logger logger = Logger.getLogger(PerSampleReadStateManager.class);

    // compile-time switch for the (expensive) per-site downsampling statistics in captureDownsamplingStats()
    private final static boolean CAPTURE_DOWNSAMPLING_STATS = false;

    /**
     * A list (potentially empty) of alignment state machines.
     *
     * The state machines must be ordered by the alignment start of their underlying reads, with the
     * lowest alignment starts on the left, and the largest on the right
     */
    private LinkedList<AlignmentStateMachine> readStatesByAlignmentStart = new LinkedList<AlignmentStateMachine>();

    // null when downsampling is disabled; otherwise levels coverage down to downsamplingTarget
    private final Downsampler<LinkedList<AlignmentStateMachine>> levelingDownsampler;

    // target coverage when downsampling, -1 when downsampling is disabled
    private final int downsamplingTarget;

    /**
     * The number of sites where downsampling has been invoked
     */
    private int nSitesNeedingDownsampling = 0;

    /**
     * The number of sites we've visited
     */
    private int nSites = 0;

    /**
     * Create a new PerSampleReadStateManager with downsampling parameters as requested by LIBSDownsamplingInfo
     * @param LIBSDownsamplingInfo the downsampling params we want to use
     */
    public PerSampleReadStateManager(final LIBSDownsamplingInfo LIBSDownsamplingInfo) {
        this.downsamplingTarget = LIBSDownsamplingInfo.isPerformDownsampling() ? LIBSDownsamplingInfo.getToCoverage() : -1;
        this.levelingDownsampler = LIBSDownsamplingInfo.isPerformDownsampling()
                ? new LevelingDownsampler<LinkedList<AlignmentStateMachine>, AlignmentStateMachine>(LIBSDownsamplingInfo.getToCoverage())
                : null;
    }

    /**
     * Group the underlying readStatesByAlignmentStart into a list of list of alignment state machines,
     * where each list contains machines with a unique genome site. The outer list is ordered
     * by alignment start.
     *
     * For example, if the flat list has alignment starts [10, 10, 11, 12, 12, 13] then
     * the resulting grouping will be [[10, 10], [11], [12, 12], [13]].
     *
     * @return a non-null list of lists
     */
    @Ensures("result != null")
    private List<LinkedList<AlignmentStateMachine>> groupByAlignmentStart() {
        final LinkedList<LinkedList<AlignmentStateMachine>> grouped = new LinkedList<LinkedList<AlignmentStateMachine>>();

        // NOTE(review): grouping compares genome offsets, which distinguishes machines by their
        // alignment starts because all live machines sit at the same genome position here
        AlignmentStateMachine last = null;
        for ( final AlignmentStateMachine stateMachine : readStatesByAlignmentStart ) {
            if ( last == null || stateMachine.getGenomeOffset() != last.getGenomeOffset() ) {
                // we've advanced to a place where the state machine has a different state,
                // so start a new list
                grouped.add(new LinkedList<AlignmentStateMachine>());
                last = stateMachine;
            }
            grouped.getLast().add(stateMachine);
        }

        return grouped;
    }

    /**
     * Flattens the grouped list of list of alignment state machines into a single list in order
     * @param grouped the grouped state machines produced by groupByAlignmentStart (possibly downsampled)
     * @return a non-null list contains the state machines
     */
    @Ensures("result != null")
    private LinkedList<AlignmentStateMachine> flattenByAlignmentStart(final List<LinkedList<AlignmentStateMachine>> grouped) {
        final LinkedList<AlignmentStateMachine> flat = new LinkedList<AlignmentStateMachine>();
        for ( final List<AlignmentStateMachine> l : grouped )
            flat.addAll(l);
        return flat;
    }

    /**
     * Test that the reads are ordered by their alignment starts
     *
     * Used by the class invariant to verify the core ordering property of readStatesByAlignmentStart.
     *
     * @return true if well ordered, false otherwise
     */
    private boolean readStartsAreWellOrdered() {
        int lastStart = -1;
        for ( final AlignmentStateMachine machine : readStatesByAlignmentStart ) {
            if ( lastStart > machine.getRead().getAlignmentStart() )
                return false;
            lastStart = machine.getRead().getAlignmentStart();
        }
        return true;
    }

    /**
     * Assumes it can just keep the states linked lists without making a copy
     * @param states the new states to add to this manager
     * @return The change in the number of states, after including states and potentially downsampling. Note
     *         that this return result might be negative, if downsampling is enabled, as we might drop
     *         more sites than have been added by the downsampler
     */
    @Requires("states != null")
    public int addStatesAtNextAlignmentStart(final LinkedList<AlignmentStateMachine> states) {
        if ( states.isEmpty() ) {
            return 0;
        }

        readStatesByAlignmentStart.addAll(states);
        int nStatesAdded = states.size();

        if ( isDownsampling() && readStatesByAlignmentStart.size() > downsamplingTarget ) {
            // only go into the downsampling branch if we are downsampling and the coverage > the target
            captureDownsamplingStats();
            levelingDownsampler.submit(groupByAlignmentStart());
            levelingDownsampler.signalEndOfInput();

            nStatesAdded -= levelingDownsampler.getNumberOfDiscardedItems();

            // use returned List directly rather than make a copy, for efficiency's sake
            readStatesByAlignmentStart = flattenByAlignmentStart(levelingDownsampler.consumeFinalizedItems());
            levelingDownsampler.reset();
        }

        return nStatesAdded;
    }

    /**
     * Is downsampling enabled for this manager?
     * @return true if we are downsampling, false otherwise
     */
    private boolean isDownsampling() {
        return levelingDownsampler != null;
    }

    /**
     * Get the leftmost alignment state machine, or null if the read states is empty
     * @return a potentially null AlignmentStateMachine
     */
    public AlignmentStateMachine getFirst() {
        return isEmpty() ? null : readStatesByAlignmentStart.getFirst();
    }

    /**
     * Capture some statistics about the behavior of the downsampling, but only if CAPTURE_DOWNSAMPLING_STATS is true
     */
    @Requires("isDownsampling()")
    private void captureDownsamplingStats() {
        if ( CAPTURE_DOWNSAMPLING_STATS ) {
            nSites++;
            final int loc = getFirst().getGenomePosition();
            String message = "Pass through";
            final boolean downsampling = size() > downsamplingTarget;
            if ( downsampling ) {
                nSitesNeedingDownsampling++;
                message = "Downsampling";
            }

            // log every downsampled site, but pass-through sites only every 10000 visits
            if ( downsampling || nSites % 10000 == 0 )
                logger.info(String.format("%20s at %s: coverage=%d, max=%d, fraction of downsampled sites=%.2e",
                        message, loc, size(), downsamplingTarget, (1.0 * nSitesNeedingDownsampling / nSites)));
        }
    }

    /**
     * Is there at least one alignment for this sample in this manager?
     * @return true if there's at least one alignment, false otherwise
     */
    public boolean isEmpty() {
        return readStatesByAlignmentStart.isEmpty();
    }

    /**
     * Get the number of read states currently in this manager
     * @return the number of read states
     */
    @Ensures("result >= 0")
    public int size() {
        return readStatesByAlignmentStart.size();
    }

    /**
     * Advances all read states forward by one element, removing states that are
     * no long aligned to the current position.
     * @return the number of states we're removed after advancing
     */
    public int updateReadStates() {
        int nRemoved = 0;
        final Iterator<AlignmentStateMachine> it = iterator();
        while (it.hasNext()) {
            final AlignmentStateMachine state = it.next();
            final CigarOperator op = state.stepForwardOnGenome();
            if (op == null) {
                // we discard the read only when we are past its end AND indel at the end of the read (if any) was
                // already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe
                // as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag.
                it.remove(); // we've stepped off the end of the object
                nRemoved++;
            }
        }

        return nRemoved;
    }

    /**
     * Iterate over the AlignmentStateMachine in this manager in alignment start order.
     * @return a valid iterator
     */
    @Override
    @Ensures("result != null")
    public Iterator<AlignmentStateMachine> iterator() {
        return readStatesByAlignmentStart.iterator();
    }
}

View File

@ -0,0 +1,290 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import net.sf.picard.util.PeekableIterator;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.*;
/**
 * Manages and updates mapping from sample -> List of SAMRecordAlignmentState
 *
 * Optionally can keep track of all of the reads pulled off the iterator and
 * that appeared at any point in the list of SAMRecordAlignmentState for any reads.
 * This functionality is only possible at this stage, as this object does the popping of
 * reads off the underlying source iterator, and presents only a pileup-like interface
 * of samples -> SAMRecordAlignmentStates. Reconstructing the unique set of reads
 * used across all pileups is extremely expensive from that data structure.
 *
 * User: depristo
 * Date: 1/5/13
 * Time: 2:02 PM
 */
final class ReadStateManager implements Iterable<Map.Entry<String, PerSampleReadStateManager>> {
    // the complete list of samples tracked by this manager, in construction order
    private final List<String> samples;

    // source of reads; peekable so the next read can be inspected without consuming it
    private final PeekableIterator<GATKSAMRecord> iterator;

    // splits each batch of incoming reads into per-sample buckets (with optional preliminary downsampling)
    private final SamplePartitioner<GATKSAMRecord> samplePartitioner;

    /**
     * A mapping from sample name -> the per sample read state manager that manages
     *
     * IT IS CRITICAL THAT THIS BE A LINKED HASH MAP, SO THAT THE ITERATION OF THE MAP OCCURS IN THE SAME
     * ORDER AS THE ORIGINAL SAMPLES
     */
    private final Map<String, PerSampleReadStateManager> readStatesBySample = new LinkedHashMap<String, PerSampleReadStateManager>();

    // the reads submitted since the last call to transferSubmittedReads(); only maintained when keepSubmittedReads
    private LinkedList<GATKSAMRecord> submittedReads;
    private final boolean keepSubmittedReads;

    // the total number of read states across all samples; updated by collectPendingReads() and updateReadStates()
    private int totalReadStates = 0;

    /**
     * Create a new ReadStateManager
     *
     * @param source the iterator supplying reads, assumed sorted by alignment position
     * @param samples the complete list of samples whose reads we will manage
     * @param LIBSDownsamplingInfo downsampling parameters forwarded to the per-sample managers and partitioner
     * @param keepSubmittedReads if true, track every read submitted so it can be retrieved via transferSubmittedReads()
     */
    public ReadStateManager(final Iterator<GATKSAMRecord> source,
                            final List<String> samples,
                            final LIBSDownsamplingInfo LIBSDownsamplingInfo,
                            final boolean keepSubmittedReads) {
        this.samples = samples;
        this.iterator = new PeekableIterator<GATKSAMRecord>(source);

        this.keepSubmittedReads = keepSubmittedReads;
        this.submittedReads = new LinkedList<GATKSAMRecord>();

        for (final String sample : samples) {
            // because this is a linked hash map the order of iteration will be in sample order
            readStatesBySample.put(sample, new PerSampleReadStateManager(LIBSDownsamplingInfo));
        }

        samplePartitioner = new SamplePartitioner<GATKSAMRecord>(LIBSDownsamplingInfo, samples);
    }

    /**
     * Returns a iterator over all the sample -> per-sample read state managers with each sample in this read state manager.
     *
     * The order of iteration is the same as the order of the samples provided upon construction to this
     * ReadStateManager.
     *
     * @return Iterator over sample + per sample read state manager pairs for this read state manager.
     */
    @Override
    public Iterator<Map.Entry<String, PerSampleReadStateManager>> iterator() {
        return readStatesBySample.entrySet().iterator();
    }

    /**
     * Are there no read states at all in this manager, across all samples?
     * @return true if no sample has any read states
     */
    public boolean isEmpty() {
        return totalReadStates == 0;
    }

    /**
     * Retrieves the total number of reads in the manager across all samples.
     *
     * @return Total number of reads over all samples.
     */
    public int size() {
        return totalReadStates;
    }

    /**
     * Retrieves the total number of reads in the manager in the given sample.
     *
     * @param sample The sample.
     * @return Total number of reads in the given sample.
     */
    public int size(final String sample) {
        return readStatesBySample.get(sample).size();
    }

    /**
     * Get the left-most alignment state machine among all samples, or null if all samples are empty
     * @return a potentially null AlignmentStateMachine
     */
    public AlignmentStateMachine getFirst() {
        for ( final PerSampleReadStateManager manager : readStatesBySample.values() ) {
            if ( ! manager.isEmpty() )
                return manager.getFirst();
        }
        return null;
    }

    /**
     * Is there any more work to do: either live read states or unconsumed reads in the source iterator?
     * @return true if there are more read states or reads to process
     */
    public boolean hasNext() {
        return totalReadStates > 0 || iterator.hasNext();
    }

    /**
     * Advances all of the read states by one bp. After this call the read states are reflective
     * of the next pileup.
     */
    public void updateReadStates() {
        for (final PerSampleReadStateManager perSampleReadStateManager : readStatesBySample.values() ) {
            totalReadStates -= perSampleReadStateManager.updateReadStates();
        }
    }

    /**
     * Does read start at the same position as described by currentContigIndex and currentAlignmentStart?
     *
     * @param read the read we want to test
     * @param currentContigIndex the contig index (from the read's getReferenceIndex) of the reads in this state manager
     * @param currentAlignmentStart the alignment start of the left-most position on the
     *                              genome of the reads in this read state manager
     * @return true if read has contig index and start equal to the current ones
     */
    private boolean readStartsAtCurrentPosition(final GATKSAMRecord read, final int currentContigIndex, final int currentAlignmentStart) {
        return read.getAlignmentStart() == currentAlignmentStart && read.getReferenceIndex() == currentContigIndex;
    }

    /**
     * Pull all of the reads off the iterator that overlap the left-most position among all
     * reads this ReadStateManager
     */
    public void collectPendingReads() {
        if (!iterator.hasNext())
            return;

        // determine the left-most boundary that determines which reads to keep in this new pileup
        final int firstContigIndex;
        final int firstAlignmentStart;
        if ( isEmpty() ) {
            // there are no reads here, so our next state is the next read in the stream
            firstContigIndex = iterator.peek().getReferenceIndex();
            firstAlignmentStart = iterator.peek().getAlignmentStart();
        } else {
            // there's a read in the system, so it's our targeted first read
            final AlignmentStateMachine firstState = getFirst();
            firstContigIndex = firstState.getReferenceIndex();
            // note this isn't the alignment start of the read, but rather the alignment start position
            firstAlignmentStart = firstState.getGenomePosition();
        }

        while ( iterator.hasNext() && readStartsAtCurrentPosition(iterator.peek(), firstContigIndex, firstAlignmentStart) ) {
            submitRead(iterator.next());
        }

        samplePartitioner.doneSubmittingReads();

        for (final String sample : samples) {
            final Collection<GATKSAMRecord> newReads = samplePartitioner.getReadsForSample(sample);

            // if we're keeping reads, take the (potentially downsampled) list of new reads for this sample
            // and add to the list of reads. Note this may reorder the list of reads somewhat (it groups them
            // by sample, but it cannot change their absolute position on the genome as they all must
            // start at the current location
            if ( keepSubmittedReads )
                submittedReads.addAll(newReads);

            final PerSampleReadStateManager statesBySample = readStatesBySample.get(sample);
            addReadsToSample(statesBySample, newReads);
        }

        samplePartitioner.reset();
    }

    /**
     * Add a read to the sample partitioner. Note that tracking in submittedReads (when enabled)
     * happens later, in collectPendingReads(), after the partitioner has downsampled.
     * @param read a non-null read
     */
    @Requires("read != null")
    protected void submitRead(final GATKSAMRecord read) {
        samplePartitioner.submitRead(read);
    }

    /**
     * Transfer current list of submitted reads, clearing old list
     *
     * Takes the maintained list of submitted reads, and transfers it to the caller of this
     * function. The old list is set to a new, cleanly allocated list so the caller officially
     * owns the list returned by this call. This is the only way to clear the tracking
     * of submitted reads, if enabled.
     *
     * How to use this function:
     *
     * while ( doing some work unit, such as creating pileup at some locus ):
     *   interact with ReadStateManager in some way to make work unit
     *   readsUsedInPileup = transferSubmittedReads()
     *
     * @throws UnsupportedOperationException if called when keepSubmittedReads is false
     *
     * @return the current list of submitted reads
     */
    @Ensures({
            "result != null",
            "result != submittedReads" // result and previous submitted reads are not == objects
    })
    public List<GATKSAMRecord> transferSubmittedReads() {
        if ( ! keepSubmittedReads ) throw new UnsupportedOperationException("cannot transferSubmittedReads if you aren't keeping them");

        final List<GATKSAMRecord> prevSubmittedReads = submittedReads;
        this.submittedReads = new LinkedList<GATKSAMRecord>();

        return prevSubmittedReads;
    }

    /**
     * Are we keeping submitted reads, or not?
     * @return true if we are keeping them, false otherwise
     */
    public boolean isKeepingSubmittedReads() {
        return keepSubmittedReads;
    }

    /**
     * Obtain a pointer to the list of submitted reads.
     *
     * This is not a copy of the list; it is shared with this ReadStateManager. It should
     * not be modified. Updates to this ReadStateManager may change the contents of the
     * list entirely.
     *
     * For testing purposes only.
     *
     * Will always be empty if we are not keepSubmittedReads
     *
     * @return a non-null list of reads that have been submitted to this ReadStateManager
     */
    @Ensures({"result != null","keepSubmittedReads || result.isEmpty()"})
    protected List<GATKSAMRecord> getSubmittedReads() {
        return submittedReads;
    }

    /**
     * Add reads with the given sample name to the given hanger entry.
     *
     * @param readStates The list of read states to add this collection of reads.
     * @param reads Reads to add. Selected reads will be pulled from this source.
     */
    private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<GATKSAMRecord> reads) {
        if (reads.isEmpty())
            return;

        final LinkedList<AlignmentStateMachine> newReadStates = new LinkedList<AlignmentStateMachine>();

        for (final GATKSAMRecord read : reads) {
            final AlignmentStateMachine state = new AlignmentStateMachine(read);
            if ( state.stepForwardOnGenome() != null ) // todo -- should be an assertion not a skip
                // explicitly filter out reads that are all insertions / soft clips
                newReadStates.add(state);
        }

        totalReadStates += readStates.addStatesAtNextAlignmentStart(newReadStates);
    }
}

View File

@ -0,0 +1,172 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.downsampling.Downsampler;
import org.broadinstitute.sting.gatk.downsampling.PassThroughDownsampler;
import org.broadinstitute.sting.gatk.downsampling.ReservoirDownsampler;
import java.util.*;
/**
 * Divides reads by sample and (if requested) does a preliminary downsampling pass
 * with a ReservoirDownsampler.
 *
 * Note: stores reads by sample ID string, not by sample object
 */
class SamplePartitioner<T extends SAMRecord> {
    /**
     * Map from sample name (as a string) to a downsampler of reads for that sample
     */
    private final Map<String, Downsampler<T>> readsBySample;

    /**
     * Are we in a state where we're done submitting reads and have semi-finalized the
     * underlying per sample downsampler?
     */
    boolean doneSubmittingReads = false;

    /**
     * Create a new SamplePartitioner capable of splitting reads up into buckets of reads for
     * each sample in samples, and perform a preliminary downsampling of these reads
     * (separately for each sample) if downsampling is requested in LIBSDownsamplingInfo
     *
     * Note that samples must be comprehensive, in that all reads ever submitted to this
     * partitioner must come from one of the samples provided here. If not, submitRead
     * will throw an exception. Duplicates in the list of samples will be ignored
     *
     * @param LIBSDownsamplingInfo do we want to downsample, and if so to what coverage?
     * @param samples the complete list of samples we're going to partition reads into. Can be
     *                empty, but in that case this code cannot function properly if you
     *                attempt to add data to it.
     */
    @Ensures({
            "readsBySample != null",
            "readsBySample.size() == new HashSet(samples).size()"
    })
    public SamplePartitioner(final LIBSDownsamplingInfo LIBSDownsamplingInfo, final List<String> samples) {
        if ( LIBSDownsamplingInfo == null ) {
            throw new IllegalArgumentException("LIBSDownsamplingInfo cannot be null");
        }
        if ( samples == null ) {
            throw new IllegalArgumentException("samples must be a non-null list");
        }

        // linked hash map keeps per-sample iteration in the original sample order;
        // duplicate sample names simply overwrite their previous (empty) downsampler
        readsBySample = new LinkedHashMap<String, Downsampler<T>>(samples.size());
        for ( final String sampleName : samples ) {
            readsBySample.put(sampleName, createDownsampler(LIBSDownsamplingInfo));
        }
    }

    /**
     * Create a new, ready to use downsampler based on the parameters in LIBSDownsamplingInfo
     * @param LIBSDownsamplingInfo the parameters to use in creating the downsampler
     * @return a downsampler appropriate for LIBSDownsamplingInfo. If no downsampling is requested,
     *         uses the PassThroughDownsampler, which does nothing at all.
     */
    @Requires("LIBSDownsamplingInfo != null")
    @Ensures("result != null")
    private Downsampler<T> createDownsampler(final LIBSDownsamplingInfo LIBSDownsamplingInfo) {
        if ( LIBSDownsamplingInfo.isPerformDownsampling() ) {
            return new ReservoirDownsampler<T>(LIBSDownsamplingInfo.getToCoverage(), true);
        } else {
            return new PassThroughDownsampler<T>();
        }
    }

    /**
     * Offer this read to the partitioner, putting it into the bucket of reads for the sample
     * of read (obtained via the read's read group).
     *
     * If the read group is missing, uses the special "null" read group
     *
     * @throws IllegalStateException if the sample of read wasn't present in the original
     *                               set of samples provided to this SamplePartitioner at construction
     *
     * @param read the read to add to the sample's list of reads
     */
    @Requires("read != null")
    @Ensures("doneSubmittingReads == false")
    public void submitRead(final T read) {
        // reads without a read group fall into the special null sample bucket
        String sampleName = null;
        if ( read.getReadGroup() != null ) {
            sampleName = read.getReadGroup().getSample();
        }

        final Downsampler<T> sampleSink = readsBySample.get(sampleName);
        if ( sampleSink == null ) {
            throw new IllegalStateException("Offered read with sample name " + sampleName + " to SamplePartitioner " +
                    "but this sample wasn't provided as one of possible samples at construction");
        }

        sampleSink.submit(read);
        doneSubmittingReads = false;
    }

    /**
     * Tell this partitioner that all reads in this cycle have been submitted, so that we
     * can finalize whatever downsampling is required by each sample.
     *
     * Note that we *must* call this function before getReadsForSample, or else that
     * function will exception out.
     */
    @Ensures("doneSubmittingReads == true")
    public void doneSubmittingReads() {
        for ( final Downsampler<T> sampleSink : readsBySample.values() ) {
            sampleSink.signalEndOfInput();
        }
        doneSubmittingReads = true;
    }

    /**
     * Get the final collection of reads for this sample for this cycle
     *
     * The cycle is defined as all of the reads that occur between
     * the first call to submitRead until doneSubmittingReads is called. At that
     * point additional downsampling may occur (depending on construction arguments)
     * and that set of reads is returned here.
     *
     * Note that this function can only be called once per cycle, as the underlying
     * collection of reads is cleared.
     *
     * @param sampleName the sample we want reads for, must be present in the original samples
     * @return a non-null collection of reads for sample in this cycle
     */
    @Ensures("result != null")
    public Collection<T> getReadsForSample(final String sampleName) {
        if ( ! doneSubmittingReads ) {
            throw new IllegalStateException("getReadsForSample called before doneSubmittingReads was called");
        }

        final Downsampler<T> sampleSink = readsBySample.get(sampleName);
        if ( sampleSink == null ) {
            throw new NoSuchElementException("Sample name not found");
        }

        return sampleSink.consumeFinalizedItems();
    }

    /**
     * Resets this SamplePartitioner, indicating that we're starting a new
     * cycle of adding reads to each underlying downsampler.
     */
    @Ensures("doneSubmittingReads == false")
    public void reset() {
        for ( final Downsampler<T> sampleSink : readsBySample.values() ) {
            sampleSink.clear();
            sampleSink.reset();
        }
        doneSubmittingReads = false;
    }
}

View File

@ -27,12 +27,18 @@ package org.broadinstitute.sting.utils.pileup;
import com.google.java.contract.Ensures;
import com.google.java.contract.Requires;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import org.broadinstitute.variant.utils.BaseUtils;
import org.broadinstitute.sting.utils.MathUtils;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
/**
* Created by IntelliJ IDEA.
* User: depristo
@ -40,7 +46,12 @@ import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
* Time: 8:54:05 AM
*/
public class PileupElement implements Comparable<PileupElement> {
public static final byte DELETION_BASE = BaseUtils.D;
private final static LinkedList<CigarElement> EMPTY_LINKED_LIST = new LinkedList<CigarElement>();
private final static EnumSet<CigarOperator> ON_GENOME_OPERATORS =
EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.D);
public static final byte DELETION_BASE = BaseUtils.Base.D.base;
public static final byte DELETION_QUAL = (byte) 16;
public static final byte A_FOLLOWED_BY_INSERTION_BASE = (byte) 87;
public static final byte C_FOLLOWED_BY_INSERTION_BASE = (byte) 88;
@ -49,138 +60,213 @@ public class PileupElement implements Comparable<PileupElement> {
protected final GATKSAMRecord read; // the read this base belongs to
protected final int offset; // the offset in the bases array for this base
protected final boolean isDeletion; // is this base a deletion
protected final boolean isBeforeDeletedBase; // is the base to the right of this base an deletion
protected final boolean isAfterDeletedBase; // is the base to the left of this base a deletion
protected final boolean isBeforeInsertion; // is the base to the right of this base an insertion
protected final boolean isAfterInsertion; // is the base to the left of this base an insertion
protected final boolean isNextToSoftClip; // is this base either before or after a soft clipped base
protected final int eventLength; // what is the length of the event (insertion or deletion) *after* this base
protected final String eventBases; // if it is a deletion, we do not have information about the actual deleted bases in the read itself, so we fill the string with D's; for insertions we keep actual inserted bases
private final CigarElement currentCigarElement;
private final int currentCigarOffset;
private final int offsetInCurrentCigar;
/**
* Creates a new pileup element.
* Create a new pileup element
*
* @param read the read we are adding to the pileup
* @param offset the position in the read for this base. All deletions must be left aligned! (-1 is only allowed for reads starting with insertions)
* @param isDeletion whether or not this base is a deletion
* @param isBeforeDeletion whether or not this base is before a deletion
* @param isAfterDeletion whether or not this base is after a deletion
* @param isBeforeInsertion whether or not this base is before an insertion
* @param isAfterInsertion whether or not this base is after an insertion
* @param isNextToSoftClip whether or not this base is next to a soft clipped base
* @param nextEventBases bases in event in case element comes before insertion or deletion
* @param nextEventLength length of next event in case it's insertion or deletion
* @param read a non-null read to pileup
* @param baseOffset the offset into the read's base / qual vector aligned to this position on the genome. If the
* current cigar element is a deletion, offset should be the offset of the last M/=/X position.
* @param currentElement a non-null CigarElement that indicates the cigar element aligning the read to the genome
* @param currentCigarOffset the offset of currentElement in read.getCigar().getElement(currentCigarOffset) == currentElement)
* @param offsetInCurrentCigar how far into the currentElement are we in our alignment to the genome?
*/
@Requires({
"read != null",
"offset >= -1",
"offset <= read.getReadLength()"})
public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean isAfterInsertion, final boolean isNextToSoftClip, final String nextEventBases, final int nextEventLength) {
if (offset < 0 && isDeletion)
throw new ReviewedStingException("Pileup Element cannot create a deletion with a negative offset");
public PileupElement(final GATKSAMRecord read, final int baseOffset,
final CigarElement currentElement, final int currentCigarOffset,
final int offsetInCurrentCigar) {
assert currentElement != null;
this.read = read;
this.offset = offset;
this.isDeletion = isDeletion;
this.isBeforeDeletedBase = isBeforeDeletion;
this.isAfterDeletedBase = isAfterDeletion;
this.isBeforeInsertion = isBeforeInsertion;
this.isAfterInsertion = isAfterInsertion;
this.isNextToSoftClip = isNextToSoftClip;
if (isBeforeInsertion)
eventBases = nextEventBases;
else
eventBases = null; // ignore argument in any other case
if (isBeforeDeletion || isBeforeInsertion)
eventLength = nextEventLength;
else
eventLength = -1;
this.offset = baseOffset;
this.currentCigarElement = currentElement;
this.currentCigarOffset = currentCigarOffset;
this.offsetInCurrentCigar = offsetInCurrentCigar;
// for performance regions these are assertions
assert this.read != null;
assert this.offset >= 0 && this.offset < this.read.getReadLength();
assert this.currentCigarOffset >= 0;
assert this.currentCigarOffset < read.getCigarLength();
assert this.offsetInCurrentCigar >= 0;
assert this.offsetInCurrentCigar < currentElement.getLength();
}
public PileupElement(final GATKSAMRecord read, final int offset, final boolean isDeletion, final boolean isBeforeDeletion, final boolean isAfterDeletion, final boolean isBeforeInsertion, final boolean isAfterInsertion, final boolean isNextToSoftClip) {
this(read, offset, isDeletion, isBeforeDeletion, isAfterDeletion, isBeforeInsertion, isAfterInsertion, isNextToSoftClip, null, -1);
/**
 * Create a new PileupElement that's a copy of toCopy
 * @param toCopy the element we want to copy
 */
public PileupElement(final PileupElement toCopy) {
    // Shallow copy: the new element shares toCopy's read and cigar element references.
    this(toCopy.read, toCopy.offset, toCopy.currentCigarElement, toCopy.currentCigarOffset, toCopy.offsetInCurrentCigar);
}
/**
* Is this element a deletion w.r.t. the reference genome?
*
* @return true if this is a deletion, false otherwise
*/
// Deletion status is derived directly from the aligning cigar element's operator.
public boolean isDeletion() {
    return currentCigarElement.getOperator() == CigarOperator.D;
}
/**
 * Is the current element immediately before a deletion, but itself not a deletion?
 *
 * Suppose we are aligning a read with cigar 3M2D1M.  This function is true
 * if we are in the last cigar position of the 3M, but not if we are in the 2D itself.
 *
 * @return true if the next alignment position is a deletion w.r.t. the reference genome
 */
public boolean isBeforeDeletionStart() {
    return ! isDeletion() && atEndOfCurrentCigar() && hasOperator(getNextOnGenomeCigarElement(), CigarOperator.D);
}
/**
 * Is the current element immediately after a deletion, but itself not a deletion?
 *
 * Suppose we are aligning a read with cigar 1M2D3M.  This function is true
 * if we are in the first cigar position of the 3M, but not if we are in the 2D itself or
 * in any but the first position of the 3M.
 *
 * @return true if the previous alignment position is a deletion w.r.t. the reference genome
 */
public boolean isAfterDeletionEnd() {
    return ! isDeletion() && atStartOfCurrentCigar() && hasOperator(getPreviousOnGenomeCigarElement(), CigarOperator.D);
}
/**
 * Get the read for this pileup element
 * @return a non-null GATKSAMRecord
 */
@Ensures("result != null")
public GATKSAMRecord getRead() {
    return read;
}

/**
 * Get the offset of the this element into the read that aligns that read's base to this genomic position.
 *
 * If the current element is a deletion then offset is the offset of the last base containing offset.
 *
 * @return a valid offset into the read's bases
 */
@Ensures({"result >= 0", "result <= read.getReadLength()"})
public int getOffset() {
    return offset;
}
public byte getBase() {
return getBase(offset);
}
public int getBaseIndex() {
return getBaseIndex(offset);
}
public byte getQual() {
return getQual(offset);
}
public byte getBaseInsertionQual() {
return getBaseInsertionQual(offset);
}
public byte getBaseDeletionQual() {
return getBaseDeletionQual(offset);
}
/**
* @return length of the event (number of inserted or deleted bases
* Get the base aligned to the genome at this location
*
* If the current element is a deletion returns DELETION_BASE
*
* @return a base encoded as a byte
*/
public int getEventLength() {
return eventLength;
@Ensures("result != DELETION_BASE || (isDeletion() && result == DELETION_BASE)")
public byte getBase() {
return isDeletion() ? DELETION_BASE : read.getReadBases()[offset];
}
// Index form of the base at this position via BaseUtils.simpleBaseToBaseIndex
// (callers elsewhere in this codebase treat -1 as "not a regular base").
// NOTE(review): deprecated with no replacement documented here -- confirm
// intended migration path before removing callers.
@Deprecated
public int getBaseIndex() {
    return BaseUtils.simpleBaseToBaseIndex(getBase());
}
/**
 * Phred-scaled base quality at this aligned genomic position.
 *
 * Deletions have no underlying base, so DELETION_QUAL is returned for them.
 *
 * @return a phred-scaled quality score as a byte
 */
public byte getQual() {
    if ( isDeletion() )
        return DELETION_QUAL;
    return read.getBaseQualities()[offset];
}
/**
 * Phred-scaled base insertion quality at this pileup position.
 *
 * Deletions have no underlying base, so DELETION_QUAL is returned for them.
 *
 * @return a phred-scaled quality score as a byte
 */
public byte getBaseInsertionQual() {
    if ( isDeletion() )
        return DELETION_QUAL;
    return read.getBaseInsertionQualities()[offset];
}
/**
 * Phred-scaled base deletion quality at this pileup position.
 *
 * Deletions have no underlying base, so DELETION_QUAL is returned for them.
 *
 * @return a phred-scaled quality score as a byte
 */
public byte getBaseDeletionQual() {
    if ( isDeletion() )
        return DELETION_QUAL;
    return read.getBaseDeletionQualities()[offset];
}
/**
 * Length of an insertion or deletion event that immediately follows this position, or 0 if none does.
 *
 * A positive value is only returned when this element is directly before an indel: for a
 * deletion, the next genomic alignment of the read is a D element (and this element is not
 * itself a deletion); for an insertion, an I cigar element sits between this position and
 * the next genomic one.
 *
 * Note this function may be expensive, so multiple uses should be cached by the caller.
 *
 * @return number of inserted or deleted bases, or 0
 */
@Ensures("result >= 0")
public int getLengthOfImmediatelyFollowingIndel() {
    final CigarElement indelElement = getNextIndelCigarElement();
    if ( indelElement == null )
        return 0;
    return indelElement.getLength();
}
/**
 * The cigar element of an indel event immediately following this position, if any.
 *
 * When this state directly precedes a deletion (next position on the genome) or an
 * insertion (between this and the next position), the corresponding CigarElement is
 * returned; otherwise null.
 *
 * @return a CigarElement, or null if the next alignment state isn't an insertion or deletion.
 */
private CigarElement getNextIndelCigarElement() {
    if ( isBeforeDeletionStart() ) {
        final CigarElement del = getNextOnGenomeCigarElement();
        if ( ! hasOperator(del, CigarOperator.D) )
            throw new IllegalStateException("Immediately before deletion but the next cigar element isn't a deletion " + del);
        return del;
    }

    if ( isBeforeInsertion() ) {
        // isBeforeInsertion() guarantees the between-list is non-empty
        final CigarElement ins = getBetweenNextPosition().get(0);
        if ( ins.getOperator() != CigarOperator.I )
            throw new IllegalStateException("Immediately before insertion but the next cigar element isn't an insertion " + ins);
        return ins;
    }

    return null;
}
/**
* Get the bases for an insertion that immediately follows this alignment state, or null if none exists
*
* @see #getLengthOfImmediatelyFollowingIndel() for details on the meaning of immediately.
*
* If the immediately following state isn't an insertion, returns null
*
* @return actual sequence of inserted bases, or a null if the event is a deletion or if there is no event in the associated read.
*/
public String getEventBases() {
return eventBases;
@Ensures("result == null || result.length() == getLengthOfImmediatelyFollowingIndel()")
public String getBasesOfImmediatelyFollowingInsertion() {
final CigarElement element = getNextIndelCigarElement();
if ( element != null && element.getOperator() == CigarOperator.I ) {
final int getFrom = offset + 1;
final byte[] bases = Arrays.copyOfRange(read.getReadBases(), getFrom, getFrom + element.getLength());
return new String(bases);
} else
return null;
}
/**
 * Get the mapping quality of the read of this element
 * @return the mapping quality of the underlying SAM record
 */
public int getMappingQual() {
    // straight delegation to the underlying SAM record
    return read.getMappingQuality();
}
@ -190,26 +276,6 @@ public class PileupElement implements Comparable<PileupElement> {
return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), (char) getBase(), getQual());
}
protected byte getBase(final int offset) {
return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_BASE : read.getReadBases()[offset];
}
protected int getBaseIndex(final int offset) {
return BaseUtils.simpleBaseToBaseIndex((isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_BASE : read.getReadBases()[offset]);
}
protected byte getQual(final int offset) {
return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_QUAL : read.getBaseQualities()[offset];
}
protected byte getBaseInsertionQual(final int offset) {
return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_QUAL : read.getBaseInsertionQualities()[offset];
}
protected byte getBaseDeletionQual(final int offset) {
return (isDeletion() || isInsertionAtBeginningOfRead()) ? DELETION_QUAL : read.getBaseDeletionQualities()[offset];
}
@Override
public int compareTo(final PileupElement pileupElement) {
if (offset < pileupElement.offset)
@ -240,15 +306,258 @@ public class PileupElement implements Comparable<PileupElement> {
* @return
*/
public int getRepresentativeCount() {
int representativeCount = 1;
if (read.isReducedRead() && !isInsertionAtBeginningOfRead()) {
if (read.isReducedRead()) {
if (isDeletion() && (offset + 1 >= read.getReadLength()) ) // deletion in the end of the read
throw new UserException.MalformedBAM(read, String.format("Adjacent I/D events in read %s -- cigar: %s", read.getReadName(), read.getCigarString()));
representativeCount = (isDeletion()) ? MathUtils.fastRound((read.getReducedCount(offset) + read.getReducedCount(offset + 1)) / 2.0) : read.getReducedCount(offset);
return isDeletion()
? MathUtils.fastRound((read.getReducedCount(offset) + read.getReducedCount(offset + 1)) / 2.0)
: read.getReducedCount(offset);
} else {
return 1;
}
return representativeCount;
}
/**
 * Get the cigar element aligning this element to the genome
 * @return a non-null CigarElement
 */
@Ensures("result != null")
public CigarElement getCurrentCigarElement() {
    // never null: the constructor asserts currentElement != null
    return currentCigarElement;
}
/**
 * Get the offset of this cigar element in the Cigar of the current read (0-based)
 *
 * Suppose the cigar is 1M2D3I4D.  If we are in the 1M state this function returns
 * 0.  If we are in 2D, the result is 1.  If we are in the 4D, the result is 3.
 *
 * @return an offset into the read.getCigar() that brings us to the current cigar element
 */
public int getCurrentCigarOffset() {
    return currentCigarOffset;
}
/**
 * Get the offset into the *current* cigar element for this alignment position
 *
 * We can be anywhere from offset 0 (first position) to length - 1 of the current
 * cigar element aligning us to this genomic position.
 *
 * @return a valid offset into the current cigar element
 */
@Ensures({"result >= 0", "result < getCurrentCigarElement().getLength()"})
public int getOffsetInCurrentCigar() {
    // range is enforced by the constructor's assertions
    return offsetInCurrentCigar;
}
/**
 * Cigar elements occurring after the previous genomic position but before this one.
 *
 * For example, standing anywhere in the 3M of 1M2I3M, the 2I occurs before this
 * position; where exactly we are inside the current element does not matter.  All
 * skipped elements are returned, so for 5S10I2M the result is [5S, 10I].
 *
 * @return a non-null list of CigarElements
 */
@Ensures("result != null")
public LinkedList<CigarElement> getBetweenPrevPosition() {
    if ( atStartOfCurrentCigar() )
        return getBetween(Direction.PREV);
    return EMPTY_LINKED_LIST;
}
/**
 * Cigar elements occurring after this position but before the next one on the genome.
 *
 * @see #getBetweenPrevPosition() for more details
 *
 * @return a non-null list of CigarElements
 */
@Ensures("result != null")
public LinkedList<CigarElement> getBetweenNextPosition() {
    if ( atEndOfCurrentCigar() )
        return getBetween(Direction.NEXT);
    return EMPTY_LINKED_LIST;
}
/** Walk direction used by the between/neighbor helper functions. */
private enum Direction { PREV, NEXT }
/**
 * Collect the cigar elements sitting between this position and its genomic neighbor.
 *
 * @param direction PREV for elements before this position, NEXT for elements after it
 * @return a non-null list of cigar elements between this and the neighboring position in direction
 */
@Ensures("result != null")
private LinkedList<CigarElement> getBetween(final Direction direction) {
    final int step = direction == Direction.NEXT ? 1 : -1;
    final int nCigarElements = read.getCigarLength();
    LinkedList<CigarElement> found = null;

    int i = currentCigarOffset + step;
    while ( i >= 0 && i < nCigarElements ) {
        final CigarElement elt = read.getCigar().getCigarElement(i);
        if ( ON_GENOME_OPERATORS.contains(elt.getOperator()) )
            break;

        // optimization: only allocate once we actually have something to keep
        if ( found == null )
            found = new LinkedList<CigarElement>();
        if ( step > 0 )
            found.add(elt);       // walking forward: append to preserve cigar order
        else
            found.addFirst(elt);  // walking backward: prepend to preserve cigar order

        i += step;
    }

    // nothing collected => share the immutable empty list
    return found == null ? EMPTY_LINKED_LIST : found;
}
/**
 * Get the cigar element of the previous genomic aligned position
 *
 * For example, we might have 1M2I3M, and be sitting at the someone in the 3M.  This
 * function would return 1M, as the 2I isn't on the genome.  Note this function skips
 * all of the positions that would occur in the current element.  So the result
 * is always 1M regardless of whether we're in the first, second, or third position of the 3M
 * cigar.
 *
 * @return a CigarElement, or null (indicating that no previous element exists)
 */
@Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
public CigarElement getPreviousOnGenomeCigarElement() {
    // the directional walk is implemented once in getNeighboringOnGenomeCigarElement
    return getNeighboringOnGenomeCigarElement(Direction.PREV);
}
/**
 * Get the cigar element of the next genomic aligned position
 *
 * @see #getPreviousOnGenomeCigarElement() for more details
 *
 * @return a CigarElement, or null (indicating that no next element exists)
 */
@Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
public CigarElement getNextOnGenomeCigarElement() {
    // the directional walk is implemented once in getNeighboringOnGenomeCigarElement
    return getNeighboringOnGenomeCigarElement(Direction.NEXT);
}
/**
 * Find the nearest cigar element in the given direction whose operator aligns to the genome.
 *
 * @param direction the direction to look in
 * @return a CigarElement, or null if no such element exists
 */
@Ensures("result == null || ON_GENOME_OPERATORS.contains(result.getOperator())")
private CigarElement getNeighboringOnGenomeCigarElement(final Direction direction) {
    final int step = direction == Direction.NEXT ? 1 : -1;
    final int nCigarElements = read.getCigarLength();

    int i = currentCigarOffset + step;
    while ( i >= 0 && i < nCigarElements ) {
        final CigarElement candidate = read.getCigar().getCigarElement(i);
        if ( ON_GENOME_OPERATORS.contains(candidate.getOperator()) )
            return candidate;
        i += step;
    }

    // walked off the cigar without finding an on-genome element
    return null;
}
/**
 * Does the cigar element (which may be null) have operation toMatch?
 *
 * @param maybeCigarElement a CigarElement that might be null
 * @param toMatch a CigarOperator we want to match against the one in maybeCigarElement
 * @return true if maybeCigarElement isn't null and has operator toMatch
 */
@Requires("toMatch != null")
private boolean hasOperator(final CigarElement maybeCigarElement, final CigarOperator toMatch) {
    if ( maybeCigarElement == null )
        return false;
    return maybeCigarElement.getOperator() == toMatch;
}
/**
 * Does an insertion occur immediately before the current position on the genome?
 *
 * @return true if yes, false if no
 */
public boolean isAfterInsertion() {
    return isAfter(getBetweenPrevPosition(), CigarOperator.I);
}

/**
 * Does an insertion occur immediately after the current position on the genome?
 *
 * @return true if yes, false if no
 */
public boolean isBeforeInsertion() {
    return isBefore(getBetweenNextPosition(), CigarOperator.I);
}

/**
 * Does a soft-clipping event occur immediately before the current position on the genome?
 *
 * @return true if yes, false if no
 */
public boolean isAfterSoftClip() {
    return isAfter(getBetweenPrevPosition(), CigarOperator.S);
}

/**
 * Does a soft-clipping event occur immediately after the current position on the genome?
 *
 * @return true if yes, false if no
 */
public boolean isBeforeSoftClip() {
    return isBefore(getBetweenNextPosition(), CigarOperator.S);
}

/**
 * Does a soft-clipping event occur immediately before or after the current position on the genome?
 *
 * @return true if yes, false if no
 */
public boolean isNextToSoftClip() {
    return isAfterSoftClip() || isBeforeSoftClip();
}
/**
 * Is the current position the last one of the current cigar element?
 *
 * For example, inside a 3M element this is true at offsetInCurrentCigar 2, but not 0 or 1.
 *
 * @return true if we're at the end of the current cigar
 */
public boolean atEndOfCurrentCigar() {
    final int lastOffset = currentCigarElement.getLength() - 1;
    return offsetInCurrentCigar == lastOffset;
}
/**
 * Is the current position at the start of the current cigar?
 *
 * For example, if we are in element 3M, this function returns true if we are at offsetInCurrentCigar
 * of 0, but not 1 or 2.
 *
 * @return true if we're at the start of the current cigar
 */
public boolean atStartOfCurrentCigar() {
    return offsetInCurrentCigar == 0;
}
/**
 * Is op the operator of the last element in the list?
 *
 * @param elements the elements to examine
 * @param op the op we want the last element's op to equal
 * @return true if op == last(elements).op
 */
@Requires({"elements != null", "op != null"})
private boolean isAfter(final LinkedList<CigarElement> elements, final CigarOperator op) {
    final CigarElement last = elements.peekLast(); // null when the list is empty
    return last != null && last.getOperator() == op;
}
/**
 * Is op the operator of the first element in the list?
 *
 * @param elements the elements to examine
 * @param op the op we want the first element's op to equal
 * @return true if op == first(elements).op
 */
@Requires({"elements != null", "op != null"})
private boolean isBefore(final List<CigarElement> elements, final CigarOperator op) {
    if ( elements.isEmpty() )
        return false;
    return elements.get(0).getOperator() == op;
}
}

View File

@ -25,6 +25,8 @@
package org.broadinstitute.sting.utils.pileup;
import org.apache.commons.collections.iterators.IteratorChain;
import java.util.*;
/**
@ -35,6 +37,20 @@ import java.util.*;
*/
/**
 * Base class for containers of pileup elements, iterable in ordered or unordered fashion.
 */
abstract class PileupElementTracker<PE extends PileupElement> implements Iterable<PE> {
    public abstract int size();

    /**
     * Iterate through the PEs here, but in any order, which may improve performance
     * if you don't care about the underlying order the reads are coming to you in.
     * @return an iterable over all pileup elements in this tracker
     */
    public abstract Iterable<PE> unorderedIterable();

    /**
     * Same as @see #unorderedIterable but the actual iterator itself
     * @return an iterator over all pileup elements in this tracker, in no particular order
     */
    public Iterator<PE> unorderedIterator() { return unorderedIterable().iterator(); }

    // Deep-copy contract is defined by the concrete subclasses.
    public abstract PileupElementTracker<PE> copy();
}
@ -65,6 +81,7 @@ class UnifiedPileupElementTracker<PE extends PileupElement> extends PileupElemen
}
public Iterator<PE> iterator() { return pileup.iterator(); }
public Iterable<PE> unorderedIterable() { return this; }
}
class PerSamplePileupElementTracker<PE extends PileupElement> extends PileupElementTracker<PE> {
@ -113,4 +130,25 @@ class PerSamplePileupElementTracker<PE extends PileupElement> extends PileupElem
public int size() {
return size;
}
/**
 * Unordered view over all per-sample trackers: chains each sample's unordered
 * iterator, in pileup.values() order.
 *
 * @return a non-null Iterable whose iterator does not support remove()
 */
public Iterable<PE> unorderedIterable() {
    return new Iterable<PE>() {
        @Override
        public Iterator<PE> iterator() {
            return new Iterator<PE>() {
                final private IteratorChain chain = new IteratorChain();

                { // initialize the chain with the unordered iterators of the per sample pileups
                    for ( PileupElementTracker<PE> pet : pileup.values() ) {
                        chain.addIterator(pet.unorderedIterator());
                    }
                }

                @Override public boolean hasNext() { return chain.hasNext(); }

                // IteratorChain is a pre-generics commons-collections class, so a cast
                // from Object is unavoidable; it is safe because only Iterator<PE>s are
                // ever added to the chain above.
                @SuppressWarnings("unchecked")
                @Override public PE next() { return (PE)chain.next(); }

                @Override public void remove() { throw new UnsupportedOperationException("Cannot remove"); }
            };
        }
    };
}
}

View File

@ -31,7 +31,6 @@ import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.variant.utils.BaseUtils;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
@ -297,7 +296,7 @@ public class AlignmentUtils {
}
// Convenience overload: the "insertion at beginning of read" flag is always false here,
// because the new PileupElement can no longer represent that state.
public static int calcAlignmentByteArrayOffset(final Cigar cigar, final PileupElement pileupElement, final int alignmentStart, final int refLocus) {
    return calcAlignmentByteArrayOffset( cigar, pileupElement.getOffset(), false, pileupElement.isDeletion(), alignmentStart, refLocus );
}
public static int calcAlignmentByteArrayOffset(final Cigar cigar, final int offset, final boolean isInsertionAtBeginningOfRead, final boolean isDeletion, final int alignmentStart, final int refLocus) {
@ -402,13 +401,13 @@ public class AlignmentUtils {
switch (ce.getOperator()) {
case I:
if (alignPos > 0) {
if (alignment[alignPos - 1] == BaseUtils.A) {
if (alignment[alignPos - 1] == BaseUtils.Base.A.base) {
alignment[alignPos - 1] = PileupElement.A_FOLLOWED_BY_INSERTION_BASE;
} else if (alignment[alignPos - 1] == BaseUtils.C) {
} else if (alignment[alignPos - 1] == BaseUtils.Base.C.base) {
alignment[alignPos - 1] = PileupElement.C_FOLLOWED_BY_INSERTION_BASE;
} else if (alignment[alignPos - 1] == BaseUtils.T) {
} else if (alignment[alignPos - 1] == BaseUtils.Base.T.base) {
alignment[alignPos - 1] = PileupElement.T_FOLLOWED_BY_INSERTION_BASE;
} else if (alignment[alignPos - 1] == BaseUtils.G) {
} else if (alignment[alignPos - 1] == BaseUtils.Base.G.base) {
alignment[alignPos - 1] = PileupElement.G_FOLLOWED_BY_INSERTION_BASE;
}
}

View File

@ -0,0 +1,225 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.sam;
import net.sf.picard.reference.IndexedFastaSequenceFile;
import net.sf.samtools.*;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.NGSPlatform;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
* Easy to use creator of artificial BAM files for testing
*
* Allows us to make a stream of reads or an index BAM file with read having the following properties
*
* - coming from n samples
* - of fixed read length and aligned to the genome with M operator
* - having N reads per alignment start
* - skipping N bases between each alignment start
* - starting at a given alignment start
*
* User: depristo
* Date: 1/15/13
* Time: 9:22 AM
*/
public class ArtificialBAMBuilder {
    // NOTE(review): not referenced inside this class; presumably consumed by callers -- confirm.
    public final static int BAM_SHARD_SIZE = 16384;

    private final IndexedFastaSequenceFile reference;   // null when constructed from a dictionary alone
    private final GenomeLocParser parser;

    final int nReadsPerLocus;   // reads generated at each alignment start
    final int nLoci;            // number of distinct alignment starts

    int skipNLoci = 0;          // loci skipped between consecutive alignment starts
    int alignmentStart = 1;     // first alignment start
    int readLength = 10;        // length of every generated read

    private final ArrayList<String> samples = new ArrayList<String>();                    // rebuilt by createAndSetHeader()
    private LinkedList<GATKSAMRecord> additionalReads = new LinkedList<GATKSAMRecord>();  // extra reads merged into makeReads()

    final SAMFileWriterFactory factory = new SAMFileWriterFactory();
    { // instance initializer: every writer this factory produces also writes an index
        factory.setCreateIndex(true);
    }

    SAMFileHeader header;  // installed by createAndSetHeader() or setHeader()

    /** Build from an indexed reference; installs a fresh single-sample header. */
    public ArtificialBAMBuilder(final IndexedFastaSequenceFile reference, int nReadsPerLocus, int nLoci) {
        this.nReadsPerLocus = nReadsPerLocus;
        this.nLoci = nLoci;

        this.reference = reference;
        this.parser = new GenomeLocParser(reference);
        createAndSetHeader(1);
    }

    /** Build against a default artificial 1-contig, 1Mb sequence dictionary (no reference). */
    public ArtificialBAMBuilder(int nReadsPerLocus, int nLoci) {
        this(ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000000).getSequenceDictionary(), nReadsPerLocus, nLoci);
    }

    /** Build from a sequence dictionary only; getReference() will return null. */
    public ArtificialBAMBuilder(final SAMSequenceDictionary dict, int nReadsPerLocus, int nLoci) {
        this.nReadsPerLocus = nReadsPerLocus;
        this.nLoci = nLoci;

        this.reference = null;
        this.parser = new GenomeLocParser(dict);
        createAndSetHeader(1);
    }

    public IndexedFastaSequenceFile getReference() {
        return reference;
    }

    public GenomeLocParser getGenomeLocParser() {
        return parser;
    }

    /**
     * Replace the current header with a fresh coordinate-sorted one containing nSamples
     * read groups ("rg0".."rgN-1" with samples "sample0".."sampleN-1"); rebuilds the
     * samples list as a side effect.
     *
     * @return this builder, for chaining
     */
    public ArtificialBAMBuilder createAndSetHeader(final int nSamples) {
        this.header = new SAMFileHeader();
        header.setSortOrder(SAMFileHeader.SortOrder.coordinate);
        header.setSequenceDictionary(parser.getContigs());
        samples.clear();

        for ( int i = 0; i < nSamples; i++ ) {
            final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i);
            final String sample = "sample" + i;
            samples.add(sample);
            rg.setSample(sample);
            rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform());
            header.addReadGroup(rg);
        }

        return this;
    }

    // Queue a single extra read to be merged (coordinate-sorted) into makeReads() output.
    public void addReads(final GATKSAMRecord readToAdd) {
        additionalReads.add(readToAdd);
    }

    // Queue a batch of extra reads to be merged into makeReads() output.
    public void addReads(final Collection<GATKSAMRecord> readsToAdd) {
        additionalReads.addAll(readsToAdd);
    }

    // NOTE(review): exposes the live internal list -- callers can mutate it.
    public List<String> getSamples() {
        return samples;
    }

    /**
     * Create a read stream based on the parameters.  The cigar string for each
     * read will be *M, where * is the length of the read.
     *
     * Useful for testing things like LocusIteratorBystate
     *
     * @return a ordered list of reads
     */
    public List<GATKSAMRecord> makeReads() {
        final String baseName = "read";

        List<GATKSAMRecord> reads = new ArrayList<GATKSAMRecord>(nReadsPerLocus*nLoci);
        for ( int locusI = 0; locusI < nLoci; locusI++) {
            final int locus = locusI * (skipNLoci + 1); // stride over skipped loci
            for ( int readI = 0; readI < nReadsPerLocus; readI++ ) {
                for ( final SAMReadGroupRecord rg : header.getReadGroups() ) {
                    final String readName = String.format("%s.%d.%d.%s", baseName, locus, readI, rg.getId());
                    final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, alignmentStart + locus, readLength);
                    read.setReadGroup(new GATKSAMReadGroupRecord(rg));
                    reads.add(read);
                }
            }
        }

        if ( ! additionalReads.isEmpty() ) {
            // merge in the extra reads, then restore coordinate order
            reads.addAll(additionalReads);
            Collections.sort(reads, new SAMRecordCoordinateComparator());
        }

        return reads;
    }

    /**
     * Make an indexed BAM file contains the reads in the builder, marking it for deleteOnExit()
     * @return the BAM file
     */
    public File makeTemporarilyBAMFile() {
        try {
            final File file = File.createTempFile("tempBAM", ".bam");
            file.deleteOnExit();
            return makeBAMFile(file);
        } catch ( IOException e ) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Write the reads from this builder to output, creating an index as well
     * @param output the output BAM file we want to use
     * @return the output file, for convenience
     */
    public File makeBAMFile(final File output) {
        final SAMFileWriter writer = factory.makeBAMWriter(header, true, output, 0);
        for ( final GATKSAMRecord read : makeReads() )
            writer.addAlignment(read);
        writer.close();
        return output;
    }

    public int getnReadsPerLocus() { return nReadsPerLocus; }
    public int getnLoci() { return nLoci; }
    public int getSkipNLoci() { return skipNLoci; }
    public ArtificialBAMBuilder setSkipNLoci(int skipNLoci) { this.skipNLoci = skipNLoci; return this; }
    public int getAlignmentStart() { return alignmentStart; }
    public ArtificialBAMBuilder setAlignmentStart(int alignmentStart) { this.alignmentStart = alignmentStart; return this; }
    public int getReadLength() { return readLength; }
    public ArtificialBAMBuilder setReadLength(int readLength) { this.readLength = readLength; return this; }
    public SAMFileHeader getHeader() { return header; }
    public ArtificialBAMBuilder setHeader(SAMFileHeader header) { this.header = header; return this; }

    // NOTE(review): computed as start + nLoci strides + readLength -- confirm the intended fencepost.
    public int getAlignmentEnd() {
        return alignmentStart + nLoci * (skipNLoci + 1) + readLength;
    }

    public int getNSamples() { return samples.size(); }

    public int expectedNumberOfReads() {
        return nLoci * nReadsPerLocus * header.getReadGroups().size();
    }

    @Override
    public String toString() {
        return "ArtificialBAMBuilder{" +
                "samples=" + samples +
                ", readLength=" + readLength +
                ", alignmentStart=" + alignmentStart +
                ", skipNLoci=" + skipNLoci +
                ", nLoci=" + nLoci +
                ", nReadsPerLocus=" + nReadsPerLocus +
                '}';
    }
}

View File

@ -30,6 +30,7 @@ import org.broadinstitute.sting.gatk.iterators.StingSAMIterator;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;
import org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl;
@ -449,10 +450,10 @@ public class ArtificialSAMUtils {
final GATKSAMRecord left = pair.get(0);
final GATKSAMRecord right = pair.get(1);
pileupElements.add(new PileupElement(left, pos - leftStart, false, false, false, false, false, false));
pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(left, pos - leftStart));
if (pos >= right.getAlignmentStart() && pos <= right.getAlignmentEnd()) {
pileupElements.add(new PileupElement(right, pos - rightStart, false, false, false, false, false, false));
pileupElements.add(LocusIteratorByState.createPileupForReadAndOffset(right, pos - rightStart));
}
}

View File

@ -169,8 +169,8 @@ public class ReadUtils {
* @return whether or not the base is in the adaptor
*/
public static boolean isBaseInsideAdaptor(final GATKSAMRecord read, long basePos) {
    final int adaptorBoundary = getAdaptorBoundary(read);
    // no boundary computable, or implausibly large insert => not inside the adaptor
    if (adaptorBoundary == CANNOT_COMPUTE_ADAPTOR_BOUNDARY || read.getInferredInsertSize() > DEFAULT_ADAPTOR_SIZE)
        return false;

    return read.getReadNegativeStrandFlag() ? basePos <= adaptorBoundary : basePos >= adaptorBoundary;
@ -199,26 +199,28 @@ public class ReadUtils {
* in these cases the adaptor boundary is at the start of the read plus the inferred insert size (plus one)
*
* @param read the read being tested for the adaptor boundary
 * @return the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read.
 * CANNOT_COMPUTE_ADAPTOR_BOUNDARY if the read is unmapped or the mate is mapped to another contig.
*/
public static Integer getAdaptorBoundary(final SAMRecord read) {
public static int getAdaptorBoundary(final SAMRecord read) {
final int MAXIMUM_ADAPTOR_LENGTH = 8;
final int insertSize = Math.abs(read.getInferredInsertSize()); // the inferred insert size can be negative if the mate is mapped before the read (so we take the absolute value)
if (insertSize == 0 || read.getReadUnmappedFlag()) // no adaptors in reads with mates in another chromosome or unmapped pairs
return null;
return CANNOT_COMPUTE_ADAPTOR_BOUNDARY;
Integer adaptorBoundary; // the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read)
int adaptorBoundary; // the reference coordinate for the adaptor boundary (effectively the first base IN the adaptor, closest to the read)
if (read.getReadNegativeStrandFlag())
adaptorBoundary = read.getMateAlignmentStart() - 1; // case 1 (see header)
else
adaptorBoundary = read.getAlignmentStart() + insertSize + 1; // case 2 (see header)
if ( (adaptorBoundary < read.getAlignmentStart() - MAXIMUM_ADAPTOR_LENGTH) || (adaptorBoundary > read.getAlignmentEnd() + MAXIMUM_ADAPTOR_LENGTH) )
adaptorBoundary = null; // we are being conservative by not allowing the adaptor boundary to go beyond what we belive is the maximum size of an adaptor
adaptorBoundary = CANNOT_COMPUTE_ADAPTOR_BOUNDARY; // we are being conservative by not allowing the adaptor boundary to go beyond what we belive is the maximum size of an adaptor
return adaptorBoundary;
}
public static int CANNOT_COMPUTE_ADAPTOR_BOUNDARY = Integer.MIN_VALUE;
/**
* is the read a 454 read?
@ -392,6 +394,11 @@ public class ReadUtils {
return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord, tail, false);
}
public static int getReadCoordinateForReferenceCoordinateUpToEndOfRead(GATKSAMRecord read, int refCoord, ClippingTail tail) {
final int leftmostSafeVariantPosition = Math.max(read.getSoftStart(), refCoord);
return getReadCoordinateForReferenceCoordinate(read.getSoftStart(), read.getCigar(), leftmostSafeVariantPosition, tail, false);
}
public static int getReadCoordinateForReferenceCoordinate(final int alignmentStart, final Cigar cigar, final int refCoord, final ClippingTail tail, final boolean allowGoalNotReached) {
Pair<Integer, Boolean> result = getReadCoordinateForReferenceCoordinate(alignmentStart, cigar, refCoord, allowGoalNotReached);
int readCoord = result.getFirst();

View File

@ -26,6 +26,7 @@
package org.broadinstitute.variant.utils;
import net.sf.samtools.util.StringUtil;
import org.broadinstitute.sting.utils.exceptions.UserException;
import java.util.Arrays;
import java.util.Random;
@ -34,42 +35,66 @@ import java.util.Random;
* BaseUtils contains some basic utilities for manipulating nucleotides.
*/
public class BaseUtils {
public final static byte A = (byte) 'A';
public final static byte C = (byte) 'C';
public final static byte G = (byte) 'G';
public final static byte T = (byte) 'T';
public final static byte N = (byte) 'N';
public final static byte D = (byte) 'D';
public enum Base {
A ((byte)'A'),
C ((byte)'C'),
G ((byte)'G'),
T ((byte)'T'),
N ((byte)'N'),
D ((byte)'D');
//
// todo -- we need a generalized base abstraction using the Base enum.
//
public byte base;
private Base(final byte base) {
this.base = base;
}
}
// todo -- add this to the generalized base abstraction using the Base enum.
public final static byte[] BASES = {'A', 'C', 'G', 'T'};
public final static byte[] EXTENDED_BASES = {'A', 'C', 'G', 'T', 'N', 'D'};
static private final int[] baseIndexMap = new int[256];
static {
Arrays.fill(baseIndexMap, -1);
baseIndexMap['A'] = 0;
baseIndexMap['a'] = 0;
baseIndexMap['*'] = 0; // the wildcard character counts as an A
baseIndexMap['C'] = 1;
baseIndexMap['c'] = 1;
baseIndexMap['G'] = 2;
baseIndexMap['g'] = 2;
baseIndexMap['T'] = 3;
baseIndexMap['t'] = 3;
baseIndexMap['A'] = Base.A.ordinal();
baseIndexMap['a'] = Base.A.ordinal();
baseIndexMap['*'] = Base.A.ordinal(); // the wildcard character counts as an A
baseIndexMap['C'] = Base.C.ordinal();
baseIndexMap['c'] = Base.C.ordinal();
baseIndexMap['G'] = Base.G.ordinal();
baseIndexMap['g'] = Base.G.ordinal();
baseIndexMap['T'] = Base.T.ordinal();
baseIndexMap['t'] = Base.T.ordinal();
}
// todo -- fix me (enums?)
public static final byte DELETION_INDEX = 4;
public static final byte NO_CALL_INDEX = 5; // (this is 'N')
public static final int aIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'A');
public static final int cIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'C');
public static final int gIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'G');
public static final int tIndex = BaseUtils.simpleBaseToBaseIndex((byte) 'T');
static private final int[] baseIndexWithIupacMap = baseIndexMap.clone();
static {
baseIndexWithIupacMap['*'] = -1; // the wildcard character is bad
baseIndexWithIupacMap['N'] = Base.N.ordinal();
baseIndexWithIupacMap['n'] = Base.N.ordinal();
baseIndexWithIupacMap['R'] = Base.N.ordinal();
baseIndexWithIupacMap['r'] = Base.N.ordinal();
baseIndexWithIupacMap['Y'] = Base.N.ordinal();
baseIndexWithIupacMap['y'] = Base.N.ordinal();
baseIndexWithIupacMap['M'] = Base.N.ordinal();
baseIndexWithIupacMap['m'] = Base.N.ordinal();
baseIndexWithIupacMap['K'] = Base.N.ordinal();
baseIndexWithIupacMap['k'] = Base.N.ordinal();
baseIndexWithIupacMap['W'] = Base.N.ordinal();
baseIndexWithIupacMap['w'] = Base.N.ordinal();
baseIndexWithIupacMap['S'] = Base.N.ordinal();
baseIndexWithIupacMap['s'] = Base.N.ordinal();
baseIndexWithIupacMap['B'] = Base.N.ordinal();
baseIndexWithIupacMap['b'] = Base.N.ordinal();
baseIndexWithIupacMap['D'] = Base.N.ordinal();
baseIndexWithIupacMap['d'] = Base.N.ordinal();
baseIndexWithIupacMap['H'] = Base.N.ordinal();
baseIndexWithIupacMap['h'] = Base.N.ordinal();
baseIndexWithIupacMap['V'] = Base.N.ordinal();
baseIndexWithIupacMap['v'] = Base.N.ordinal();
}
// Use a fixed random seed to allow for deterministic results when using random bases
private static final Random randomNumberGen = new Random(47382911L);
@ -96,10 +121,10 @@ public class BaseUtils {
}
public static boolean isTransition(byte base1, byte base2) {
int b1 = simpleBaseToBaseIndex(base1);
int b2 = simpleBaseToBaseIndex(base2);
return b1 == 0 && b2 == 2 || b1 == 2 && b2 == 0 ||
b1 == 1 && b2 == 3 || b1 == 3 && b2 == 1;
final int b1 = simpleBaseToBaseIndex(base1);
final int b2 = simpleBaseToBaseIndex(base2);
return b1 == Base.A.ordinal() && b2 == Base.G.ordinal() || b1 == Base.G.ordinal() && b2 == Base.A.ordinal() ||
b1 == Base.C.ordinal() && b2 == Base.T.ordinal() || b1 == Base.T.ordinal() && b2 == Base.C.ordinal();
}
public static boolean isTransversion(byte base1, byte base2) {
@ -141,6 +166,21 @@ public class BaseUtils {
return base >= 'A' && base <= 'Z';
}
public static byte[] convertIUPACtoN(final byte[] bases, final boolean errorOnBadReferenceBase, final boolean ignoreConversionOfFirstByte) {
final int length = bases.length;
final int start = ignoreConversionOfFirstByte ? 1 : 0;
for ( int i = start; i < length; i++ ) {
final int baseIndex = baseIndexWithIupacMap[bases[i]];
if ( baseIndex == Base.N.ordinal() ) {
bases[i] = 'N';
} else if ( errorOnBadReferenceBase && baseIndex == -1 ) {
throw new UserException.BadInput("We encountered a non-standard non-IUPAC base in the provided reference: '" + bases[i] + "'");
}
}
return bases;
}
/**
* Converts a IUPAC nucleotide code to a pair of bases
*
@ -231,10 +271,10 @@ public class BaseUtils {
switch (base) {
case 'd':
case 'D':
return DELETION_INDEX;
return Base.D.ordinal();
case 'n':
case 'N':
return NO_CALL_INDEX;
return Base.N.ordinal();
default:
return simpleBaseToBaseIndex(base);

View File

@ -111,7 +111,7 @@ public class Allele implements Comparable<Allele> {
/** A generic static NO_CALL allele for use */
// no public way to create an allele
private Allele(byte[] bases, boolean isRef) {
protected Allele(byte[] bases, boolean isRef) {
// null alleles are no longer allowed
if ( wouldBeNullAllele(bases) ) {
throw new IllegalArgumentException("Null alleles are not supported");
@ -140,7 +140,7 @@ public class Allele implements Comparable<Allele> {
throw new IllegalArgumentException("Unexpected base in allele bases \'" + new String(bases)+"\'");
}
private Allele(String bases, boolean isRef) {
protected Allele(String bases, boolean isRef) {
this(bases.getBytes(), isRef);
}

View File

@ -73,6 +73,10 @@ public class VCFHeader {
public static final String REFERENCE_KEY = "reference";
public static final String CONTIG_KEY = "contig";
public static final String INTERVALS_KEY = "intervals";
public static final String EXCLUDE_INTERVALS_KEY = "excludeIntervals";
public static final String INTERVAL_MERGING_KEY = "interval_merging";
public static final String INTERVAL_SET_RULE_KEY = "interval_set_rule";
public static final String INTERVAL_PADDING_KEY = "interval_padding";
// were the input samples sorted originally (or are we sorting them)?
private boolean samplesWereAlreadySorted = true;

View File

@ -26,23 +26,8 @@
package org.broadinstitute.sting.gatk.datasources.reads;
import com.google.caliper.Param;
import net.sf.picard.filter.FilteringIterator;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMFileReader;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.commandline.Tags;
import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.gatk.filters.UnmappedReadFilter;
import org.broadinstitute.sting.gatk.iterators.LegacyLocusIteratorByState;
import org.broadinstitute.sting.gatk.iterators.ReadTransformer;
import org.broadinstitute.sting.gatk.walkers.qc.CountLoci;
import org.broadinstitute.sting.utils.GenomeLocParser;
import java.util.Collections;
import java.util.Iterator;
/**
* Created by IntelliJ IDEA.
@ -67,31 +52,32 @@ public class DownsamplerBenchmark extends ReadProcessingBenchmark {
@Param
private Downsampling downsampling;
public void timeDownsampling(int reps) {
for(int i = 0; i < reps; i++) {
SAMFileReader reader = new SAMFileReader(inputFile);
ReadProperties readProperties = new ReadProperties(Collections.<SAMReaderID>singletonList(new SAMReaderID(inputFile,new Tags())),
reader.getFileHeader(),
SAMFileHeader.SortOrder.coordinate,
false,
SAMFileReader.ValidationStringency.SILENT,
downsampling.create(),
new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)),
Collections.<ReadFilter>emptyList(),
Collections.<ReadTransformer>emptyList(),
false,
(byte)0);
GenomeLocParser genomeLocParser = new GenomeLocParser(reader.getFileHeader().getSequenceDictionary());
// Filter unmapped reads. TODO: is this always strictly necessary? Who in the GATK normally filters these out?
Iterator<SAMRecord> readIterator = new FilteringIterator(reader.iterator(),new UnmappedReadFilter());
LegacyLocusIteratorByState locusIteratorByState = new LegacyLocusIteratorByState(readIterator,readProperties,genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups());
while(locusIteratorByState.hasNext()) {
locusIteratorByState.next().getLocation();
}
reader.close();
}
}
// public void timeDownsampling(int reps) {
// for(int i = 0; i < reps; i++) {
// SAMFileReader reader = new SAMFileReader(inputFile);
// ReadProperties readProperties = new ReadProperties(Collections.<SAMReaderID>singletonList(new SAMReaderID(inputFile,new Tags())),
// reader.getFileHeader(),
// SAMFileHeader.SortOrder.coordinate,
// false,
// SAMFileReader.ValidationStringency.SILENT,
// downsampling.create(),
// new ValidationExclusion(Collections.singletonList(ValidationExclusion.TYPE.ALL)),
// Collections.<ReadFilter>emptyList(),
// Collections.<ReadTransformer>emptyList(),
// false,
// (byte)0,
// false);
//
// GenomeLocParser genomeLocParser = new GenomeLocParser(reader.getFileHeader().getSequenceDictionary());
// // Filter unmapped reads. TODO: is this always strictly necessary? Who in the GATK normally filters these out?
// Iterator<SAMRecord> readIterator = new FilteringIterator(reader.iterator(),new UnmappedReadFilter());
// LegacyLocusIteratorByState locusIteratorByState = new LegacyLocusIteratorByState(readIterator,readProperties,genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups());
// while(locusIteratorByState.hasNext()) {
// locusIteratorByState.next().getLocation();
// }
// reader.close();
// }
// }
private enum Downsampling {
NONE {

View File

@ -182,7 +182,8 @@ public class SAMDataSourceUnitTest extends BaseTest {
Collections.<ReadTransformer>emptyList(),
false,
(byte) -1,
removeProgramRecords);
removeProgramRecords,
false);
List<SAMProgramRecord> dontRemoveProgramRecords = data.getHeader().getProgramRecords();
assertEquals(dontRemoveProgramRecords, defaultProgramRecords, "testRemoveProgramRecords: default program records differ from removeProgramRecords = false");
@ -201,7 +202,8 @@ public class SAMDataSourceUnitTest extends BaseTest {
Collections.<ReadTransformer>emptyList(),
false,
(byte) -1,
removeProgramRecords);
removeProgramRecords,
false);
List<SAMProgramRecord> doRemoveProgramRecords = data.getHeader().getProgramRecords();
assertTrue(doRemoveProgramRecords.isEmpty(), "testRemoveProgramRecords: program records not cleared when removeProgramRecords = true");

View File

@ -1,505 +0,0 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.samtools.*;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* testing of the LEGACY version of LocusIteratorByState
*/
/**
 * Unit tests for the LEGACY version of LocusIteratorByState (LIBS).
 *
 * Each test builds artificial reads over a single 1000bp contig, drives a
 * LegacyLocusIteratorByState over them, and asserts on the resulting per-locus
 * pileups: handling of '='/'X' CIGAR operators, insertions inside pileups,
 * whole-indel reads (e.g. cigar "76I"), and the per-base PileupElement flags
 * (cross-checked against the LIBS_position reference implementation below).
 */
public class LegacyLocusIteratorByStateUnitTest extends BaseTest {
    // shared artificial header: 1 read group slot, 1 contig of 1000bp
    private static SAMFileHeader header;
    // iterator under test; rebuilt by each test via makeLTBS()
    private LegacyLocusIteratorByState li;
    private GenomeLocParser genomeLocParser;

    @BeforeClass
    public void beforeClass() {
        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
    }

    /**
     * Wraps the given reads in a FakeCloseableIterator and builds the legacy
     * iterator-by-state over them, using the sample list for read-group-less SAMs.
     */
    private LegacyLocusIteratorByState makeLTBS(List<SAMRecord> reads, ReadProperties readAttributes) {
        return new LegacyLocusIteratorByState(new FakeCloseableIterator<SAMRecord>(reads.iterator()), readAttributes, genomeLocParser, LegacyLocusIteratorByState.sampleListForSAMWithoutReadGroups());
    }

    /**
     * Reads using the '=' (EQ) and 'X' CIGAR operators must contribute to the
     * pileup exactly like plain 'M' reads: every locus sees all 4 reads.
     */
    @Test
    public void testXandEQOperators() {
        final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
        final byte[] bases2 = new byte[] {'A','A','A','C','A','A','A','A','A','C'};

        // create a test version of the Reads object
        ReadProperties readAttributes = createTestReadProperties();

        SAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10);
        r1.setReadBases(bases1);
        r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
        r1.setCigarString("10M");

        SAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10);
        r2.setReadBases(bases2);
        // NOTE(review): 12 quality values for a 10-base read -- presumably tolerated
        // by the artificial-read utilities; confirm this is intentional
        r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
        r2.setCigarString("3=1X5=1X");

        SAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10);
        r3.setReadBases(bases2);
        r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
        r3.setCigarString("3=1X5M1X");

        SAMRecord r4 = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10);
        r4.setReadBases(bases2);
        r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
        r4.setCigarString("10M");

        List<SAMRecord> reads = Arrays.asList(r1, r2, r3, r4);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads,readAttributes);

        // every locus covered by the reads must report depth 4, regardless of operator style
        while (li.hasNext()) {
            AlignmentContext context = li.next();
            ReadBackedPileup pileup = context.getBasePileup();
            Assert.assertEquals(pileup.depthOfCoverage(), 4);
        }
    }

    /**
     * A read with an internal insertion ("4M2I6M") must surface the inserted
     * bases ("CT") via PileupElement.isBeforeInsertion()/getEventBases() in a
     * regular (non-extended-event) pileup.
     */
    @Test
    public void testIndelsInRegularPileup() {
        final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
        final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'};

        // create a test version of the Reads object
        ReadProperties readAttributes = createTestReadProperties();

        SAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10);
        before.setReadBases(bases);
        before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
        before.setCigarString("10M");

        SAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10);
        during.setReadBases(indelBases);
        during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
        during.setCigarString("4M2I6M");

        SAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10);
        after.setReadBases(bases);
        after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
        after.setCigarString("10M");

        List<SAMRecord> reads = Arrays.asList(before, during, after);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads,readAttributes);

        boolean foundIndel = false;
        while (li.hasNext()) {
            AlignmentContext context = li.next();
            // filter to bases with quality >= 10 before scanning for the insertion
            ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10);
            for (PileupElement p : pileup) {
                if (p.isBeforeInsertion()) {
                    foundIndel = true;
                    Assert.assertEquals(p.getEventLength(), 2, "Wrong event length");
                    Assert.assertEquals(p.getEventBases(), "CT", "Inserted bases are incorrect");
                    break;
                }
            }
        }

        Assert.assertTrue(foundIndel,"Indel in pileup not found");
    }

    /**
     * A read that is nothing but an insertion ("76I") must still produce one
     * pileup at the following locus containing that read.
     */
    @Test
    public void testWholeIndelReadInIsolation() {
        final int firstLocus = 44367789;

        // create a test version of the Reads object
        ReadProperties readAttributes = createTestReadProperties();

        SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76);
        indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76));
        indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76));
        indelOnlyRead.setCigarString("76I");

        List<SAMRecord> reads = Arrays.asList(indelOnlyRead);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads, readAttributes);

        // Traditionally, reads that end with indels bleed into the pileup at the following locus. Verify that the next pileup contains this read
        // and considers it to be an indel-containing read.
        Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled");

        AlignmentContext alignmentContext = li.next();
        Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location.");

        ReadBackedPileup basePileup = alignmentContext.getBasePileup();
        Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size");
        Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect");
    }

    /**
     * Test to make sure that reads supporting only an indel (example cigar string: 76I) do
     * not negatively influence the ordering of the pileup.
     */
    @Test
    public void testWholeIndelRead() {
        final int firstLocus = 44367788, secondLocus = firstLocus + 1;

        SAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76);
        leadingRead.setReadBases(Utils.dupBytes((byte)'A',76));
        leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
        leadingRead.setCigarString("1M75I");

        SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76);
        indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76));
        indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
        indelOnlyRead.setCigarString("76I");

        SAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76);
        fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76));
        fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76));
        fullMatchAfterIndel.setCigarString("75I1M");

        List<SAMRecord> reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads, createTestReadProperties());
        int currentLocus = firstLocus;
        int numAlignmentContextsFound = 0;

        // expect exactly two loci: firstLocus (only leadingRead) and secondLocus
        // (indelOnlyRead then fullMatchAfterIndel, in input order)
        while(li.hasNext()) {
            AlignmentContext alignmentContext = li.next();
            Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect");

            if(currentLocus == firstLocus) {
                List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
                Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
                Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus);
            }
            else if(currentLocus == secondLocus) {
                List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
                Assert.assertEquals(readsAtLocus.size(),2,"Wrong number of reads at locus " + currentLocus);
                Assert.assertSame(readsAtLocus.get(0),indelOnlyRead,"indelOnlyRead absent from pileup at locus " + currentLocus);
                Assert.assertSame(readsAtLocus.get(1),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus);
            }

            currentLocus++;
            numAlignmentContextsFound++;
        }

        Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts");
    }

    /**
     * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly
     */
    @Test
    public void testWholeIndelReadRepresentedTest() {
        final int firstLocus = 44367788, secondLocus = firstLocus + 1;

        // single-base insertion: the lone pileup element must report the "A" event
        SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1);
        read1.setReadBases(Utils.dupBytes((byte) 'A', 1));
        read1.setBaseQualities(Utils.dupBytes((byte) '@', 1));
        read1.setCigarString("1I");

        List<SAMRecord> reads = Arrays.asList(read1);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads, createTestReadProperties());

        while(li.hasNext()) {
            AlignmentContext alignmentContext = li.next();
            ReadBackedPileup p = alignmentContext.getBasePileup();
            Assert.assertTrue(p.getNumberOfElements() == 1);
            PileupElement pe = p.iterator().next();
            Assert.assertTrue(pe.isBeforeInsertion());
            Assert.assertFalse(pe.isAfterInsertion());
            Assert.assertEquals(pe.getEventBases(), "A");
        }

        // same check for a 10-base insertion-only read
        SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10);
        read2.setReadBases(Utils.dupBytes((byte) 'A', 10));
        read2.setBaseQualities(Utils.dupBytes((byte) '@', 10));
        read2.setCigarString("10I");

        reads = Arrays.asList(read2);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads, createTestReadProperties());

        while(li.hasNext()) {
            AlignmentContext alignmentContext = li.next();
            ReadBackedPileup p = alignmentContext.getBasePileup();
            Assert.assertTrue(p.getNumberOfElements() == 1);
            PileupElement pe = p.iterator().next();
            Assert.assertTrue(pe.isBeforeInsertion());
            Assert.assertFalse(pe.isAfterInsertion());
            Assert.assertEquals(pe.getEventBases(), "AAAAAAAAAA");
        }
    }

    ////////////////////////////////////////////
    // comprehensive LIBS/PileupElement tests //
    ////////////////////////////////////////////

    /** Parameter holder for the data-driven LIBS tests: a CIGAR string and its read length. */
    private static class LIBSTest {
        final String cigar;
        final int readLength;

        private LIBSTest(final String cigar, final int readLength) {
            this.cigar = cigar;
            this.readLength = readLength;
        }
    }

    /** Supplies the CIGAR/length combinations exercised by testLIBS. */
    @DataProvider(name = "LIBSTest")
    public Object[][] createLIBSTestData() {
        //TODO -- when LIBS is fixed this should be replaced to provide all possible permutations of CIGAR strings
        return new Object[][]{
                {new LIBSTest("1I", 1)},
                {new LIBSTest("10I", 10)},
                {new LIBSTest("2M2I2M", 6)},
                {new LIBSTest("2M2I", 4)},
                //TODO -- uncomment these when LIBS is fixed
                //{new LIBSTest("2I2M", 4, Arrays.asList(2,3), Arrays.asList(IS_AFTER_INSERTION_FLAG,0))},
                //{new LIBSTest("1I1M1D1M", 3, Arrays.asList(0,1), Arrays.asList(IS_AFTER_INSERTION_FLAG | IS_BEFORE_DELETION_START_FLAG | IS_BEFORE_DELETED_BASE_FLAG,IS_AFTER_DELETED_BASE_FLAG | IS_AFTER_DELETION_END_FLAG))},
                //{new LIBSTest("1S1I1M", 3, Arrays.asList(2), Arrays.asList(IS_AFTER_INSERTION_FLAG))},
                //{new LIBSTest("1M2D2M", 3)},
                {new LIBSTest("1S1M", 2)},
                {new LIBSTest("1M1S", 2)},
                {new LIBSTest("1S1M1I", 3)}
        };
    }

    /**
     * For each CIGAR case, walks LIBS over a single synthetic read and checks
     * every per-base PileupElement flag against the independent LIBS_position
     * reference walker.
     */
    @Test(dataProvider = "LIBSTest")
    public void testLIBS(LIBSTest params) {
        final int locus = 44367788;

        SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, params.readLength);
        read.setReadBases(Utils.dupBytes((byte) 'A', params.readLength));
        read.setBaseQualities(Utils.dupBytes((byte) '@', params.readLength));
        read.setCigarString(params.cigar);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(Arrays.asList(read), createTestReadProperties());
        final LIBS_position tester = new LIBS_position(read);

        while ( li.hasNext() ) {
            AlignmentContext alignmentContext = li.next();
            ReadBackedPileup p = alignmentContext.getBasePileup();
            Assert.assertTrue(p.getNumberOfElements() == 1);
            PileupElement pe = p.iterator().next();

            // advance the reference walker in lock-step with LIBS
            tester.stepForwardOnGenome();

            Assert.assertEquals(pe.isBeforeDeletedBase(), tester.isBeforeDeletedBase);
            Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart);
            Assert.assertEquals(pe.isAfterDeletedBase(), tester.isAfterDeletedBase);
            Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd);
            Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion);
            Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion);
            Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip);
            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset());
        }
    }

    ////////////////////////////////////////////////
    // End comprehensive LIBS/PileupElement tests //
    ////////////////////////////////////////////////

    /**
     * Minimal ReadProperties for these tests: empty reader list, fresh header,
     * coordinate sort order, STRICT validation, and no filters or transformers.
     */
    private static ReadProperties createTestReadProperties() {
        return new ReadProperties(
                Collections.<SAMReaderID>emptyList(),
                new SAMFileHeader(),
                SAMFileHeader.SortOrder.coordinate,
                false,
                SAMFileReader.ValidationStringency.STRICT,
                null,
                new ValidationExclusion(),
                Collections.<ReadFilter>emptyList(),
                Collections.<ReadTransformer>emptyList(),
                false,
                (byte) -1
        );
    }
}
/**
 * Adapts a plain in-memory {@link Iterator} to the CloseableIterator interface
 * expected by LocusIteratorByState. Closing is a no-op and removal is rejected.
 */
class FakeCloseableIterator<T> implements CloseableIterator<T> {
    Iterator<T> iterator;

    public FakeCloseableIterator(Iterator<T> it) {
        this.iterator = it;
    }

    @Override
    public boolean hasNext() {
        // pure delegation to the wrapped iterator
        return this.iterator.hasNext();
    }

    @Override
    public T next() {
        // pure delegation to the wrapped iterator
        return this.iterator.next();
    }

    @Override
    public void remove() {
        // mutation through this adapter is not supported
        throw new UnsupportedOperationException("Don't remove!");
    }

    @Override
    public void close() {
        // nothing to release for an in-memory iterator
    }
}
/**
 * Independent reference walker used by testLIBS to cross-check the per-base
 * flags produced by LocusIteratorByState: it steps through a read's CIGAR one
 * reference position at a time and records whether the current base is
 * adjacent to insertions, deletions, or soft clips.
 */
final class LIBS_position {

    SAMRecord read;

    // total number of CIGAR elements in the read, fixed at construction
    final int numOperators;
    // index of the CIGAR element currently being walked
    int currentOperatorIndex = 0;
    // 1-based position within the current CIGAR element
    int currentPositionOnOperator = 0;
    // number of read bases consumed so far
    int currentReadOffset = 0;

    // flags describing the context of the base reached by the last step
    boolean isBeforeDeletionStart = false;
    boolean isBeforeDeletedBase = false;
    boolean isAfterDeletionEnd = false;
    boolean isAfterDeletedBase = false;
    boolean isBeforeInsertion = false;
    boolean isAfterInsertion = false;
    boolean isNextToSoftClip = false;

    // set once an alignment operator (M/EQ/X) has been seen; insertions
    // encountered before that point are handled by the I-case 'break' below
    boolean sawMop = false;

    public LIBS_position(final SAMRecord read) {
        this.read = read;
        numOperators = read.getCigar().numCigarElements();
    }

    /** Read offset of the current base, clamped at 0 before any base has been consumed. */
    public int getCurrentReadOffset() {
        return Math.max(0, currentReadOffset - 1);
    }

    /**
     * Steps forward on the genome. Returns false when done reading the read, true otherwise.
     */
    public boolean stepForwardOnGenome() {
        if ( currentOperatorIndex == numOperators )
            return false;

        CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex);
        // exhausted the current element: advance to the next one (or finish)
        if ( currentPositionOnOperator >= curElement.getLength() ) {
            if ( ++currentOperatorIndex == numOperators )
                return false;

            curElement = read.getCigar().getCigarElement(currentOperatorIndex);
            currentPositionOnOperator = 0;
        }

        switch ( curElement.getOperator() ) {
            case I: // insertion w.r.t. the reference
                if ( !sawMop )
                    break;
                // deliberate fall-through: once an aligned base has been seen,
                // an insertion consumes read bases and is skipped like a soft clip
            case S: // soft clip
                currentReadOffset += curElement.getLength();
                // deliberate fall-through: advance past elements that consume no reference bases
            case H: // hard clip
            case P: // padding
                currentOperatorIndex++;
                return stepForwardOnGenome();

            case D: // deletion w.r.t. the reference
            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
                currentPositionOnOperator++;
                break;

            case M:
            case EQ:
            case X:
                sawMop = true;
                currentReadOffset++;
                currentPositionOnOperator++;
                break;
            default:
                throw new IllegalStateException("No support for cigar op: " + curElement.getOperator());
        }

        // recompute the adjacency flags for the base we just stepped onto
        final boolean isFirstOp = currentOperatorIndex == 0;
        final boolean isLastOp = currentOperatorIndex == numOperators - 1;
        final boolean isFirstBaseOfOp = currentPositionOnOperator == 1;
        final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength();

        isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp);
        isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D);
        isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp);
        isAfterDeletedBase = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D);

        isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp)
                || (!sawMop && curElement.getOperator() == CigarOperator.I);
        isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp);

        isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp)
                || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp);

        return true;
    }

    /** True iff the current base is the last of its element and the NEXT element's operator is {@code op}. */
    private static boolean isBeforeOp(final Cigar cigar,
                                      final int currentOperatorIndex,
                                      final CigarOperator op,
                                      final boolean isLastOp,
                                      final boolean isLastBaseOfOp) {
        return !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op;
    }

    /** True iff the current base is the first of its element and the PREVIOUS element's operator is {@code op}. */
    private static boolean isAfterOp(final Cigar cigar,
                                     final int currentOperatorIndex,
                                     final CigarOperator op,
                                     final boolean isFirstOp,
                                     final boolean isFirstBaseOfOp) {
        return !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op;
    }
}

View File

@ -1,673 +0,0 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.samtools.*;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID;
import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.MathUtils;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
/**
* testing of the new (non-legacy) version of LocusIteratorByState
*/
public class LocusIteratorByStateUnitTest extends BaseTest {
private static SAMFileHeader header;
private LocusIteratorByState li;
private GenomeLocParser genomeLocParser;
@BeforeClass
public void beforeClass() {
    // Single artificial contig of 1000 bases; the header and its parser are shared
    // by every test in this class.
    final SAMFileHeader artificialHeader = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
    header = artificialHeader;
    genomeLocParser = new GenomeLocParser(artificialHeader.getSequenceDictionary());
}
// Builds a LocusIteratorByState over the given reads. The reads carry no read
// groups, so the special "SAM without read groups" sample list is used.
private LocusIteratorByState makeLTBS(List<SAMRecord> reads, ReadProperties readAttributes) {
return new LocusIteratorByState(new FakeCloseableIterator<SAMRecord>(reads.iterator()), readAttributes, genomeLocParser, LocusIteratorByState.sampleListForSAMWithoutReadGroups());
}
@Test
public void testXandEQOperators() {
    // Verifies that reads using the sequence-match/mismatch cigar operators ('='/EQ
    // and 'X') contribute to the pileup exactly like reads using plain 'M'.
    final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
    final byte[] bases2 = new byte[] {'A','A','A','C','A','A','A','A','A','C'};

    // create a test version of the Reads object
    ReadProperties readAttributes = createTestReadProperties();

    SAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10);
    r1.setReadBases(bases1);
    r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r1.setCigarString("10M");

    SAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10);
    r2.setReadBases(bases2);
    // Bug fix: the quality array must match the 10 read bases (was 12 entries,
    // producing an invalid SAMRecord).
    r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r2.setCigarString("3=1X5=1X");

    SAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10);
    r3.setReadBases(bases2);
    // Bug fix: same quality-length correction as r2.
    r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r3.setCigarString("3=1X5M1X");

    SAMRecord r4 = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10);
    r4.setReadBases(bases2);
    r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r4.setCigarString("10M");

    List<SAMRecord> reads = Arrays.asList(r1, r2, r3, r4);

    // create the iterator by state with the fake reads and fake records
    li = makeLTBS(reads,readAttributes);

    // All four reads span the same 10 loci, so every pileup must have depth 4.
    while (li.hasNext()) {
        AlignmentContext context = li.next();
        ReadBackedPileup pileup = context.getBasePileup();
        Assert.assertEquals(pileup.depthOfCoverage(), 4);
    }
}
// Verifies that a 2-base insertion ("CT" in a 4M2I6M read) is visible through the
// regular base pileup: the element just before the insertion must report
// isBeforeInsertion() with the correct event length and bases.
@Test
public void testIndelsInRegularPileup() {
final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'};
// create a test version of the Reads object
ReadProperties readAttributes = createTestReadProperties();
SAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10);
before.setReadBases(bases);
before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
before.setCigarString("10M");
// 12-base read: 4 aligned, 2 inserted ("CT"), 6 aligned.
SAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10);
during.setReadBases(indelBases);
during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
during.setCigarString("4M2I6M");
SAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10);
after.setReadBases(bases);
after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
after.setCigarString("10M");
List<SAMRecord> reads = Arrays.asList(before, during, after);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(reads,readAttributes);
boolean foundIndel = false;
while (li.hasNext()) {
AlignmentContext context = li.next();
// Filter to baseQ >= 10 so only real bases remain in the pileup.
ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10);
for (PileupElement p : pileup) {
if (p.isBeforeInsertion()) {
foundIndel = true;
Assert.assertEquals(p.getEventLength(), 2, "Wrong event length");
Assert.assertEquals(p.getEventBases(), "CT", "Inserted bases are incorrect");
break;
}
}
}
Assert.assertTrue(foundIndel,"Indel in pileup not found");
}
// A read that is pure insertion (cigar "76I") should still surface in exactly one
// pileup, at its alignment start, as an indel-containing read.
@Test
public void testWholeIndelReadInIsolation() {
final int firstLocus = 44367789;
// create a test version of the Reads object
ReadProperties readAttributes = createTestReadProperties();
SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76);
indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76));
indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76));
indelOnlyRead.setCigarString("76I");
List<SAMRecord> reads = Arrays.asList(indelOnlyRead);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(reads, readAttributes);
// Traditionally, reads that end with indels bleed into the pileup at the following locus. Verify that the next pileup contains this read
// and considers it to be an indel-containing read.
Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled");
AlignmentContext alignmentContext = li.next();
Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location.");
ReadBackedPileup basePileup = alignmentContext.getBasePileup();
Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size");
Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect");
}
/**
* Test to make sure that reads supporting only an indel (example cigar string: 76I) do
* not negatively influence the ordering of the pileup.
*/
@Test
public void testWholeIndelRead() {
final int firstLocus = 44367788, secondLocus = firstLocus + 1;
// Read anchored by one matching base at firstLocus, followed by a 75-base insertion.
SAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76);
leadingRead.setReadBases(Utils.dupBytes((byte)'A',76));
leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
leadingRead.setCigarString("1M75I");
// Pure-insertion read at secondLocus.
SAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76);
indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76));
indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
indelOnlyRead.setCigarString("76I");
// Insertion followed by a single matching base, also at secondLocus.
SAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76);
fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76));
fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76));
fullMatchAfterIndel.setCigarString("75I1M");
List<SAMRecord> reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(reads, createTestReadProperties());
int currentLocus = firstLocus;
int numAlignmentContextsFound = 0;
// Walk the two expected loci and check pileup membership at each.
while(li.hasNext()) {
AlignmentContext alignmentContext = li.next();
Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect");
if(currentLocus == firstLocus) {
List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus);
}
else if(currentLocus == secondLocus) {
List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
Assert.assertEquals(readsAtLocus.size(),2,"Wrong number of reads at locus " + currentLocus);
Assert.assertSame(readsAtLocus.get(0),indelOnlyRead,"indelOnlyRead absent from pileup at locus " + currentLocus);
Assert.assertSame(readsAtLocus.get(1),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus);
}
currentLocus++;
numAlignmentContextsFound++;
}
Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts");
}
/**
* Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly
*/
@Test
public void testWholeIndelReadRepresentedTest() {
final int firstLocus = 44367788, secondLocus = firstLocus + 1;
// Case 1: a single-base pure insertion ("1I") — the lone pileup element must be
// flagged as before-insertion with event bases "A".
SAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1);
read1.setReadBases(Utils.dupBytes((byte) 'A', 1));
read1.setBaseQualities(Utils.dupBytes((byte) '@', 1));
read1.setCigarString("1I");
List<SAMRecord> reads = Arrays.asList(read1);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(reads, createTestReadProperties());
while(li.hasNext()) {
AlignmentContext alignmentContext = li.next();
ReadBackedPileup p = alignmentContext.getBasePileup();
Assert.assertTrue(p.getNumberOfElements() == 1);
PileupElement pe = p.iterator().next();
Assert.assertTrue(pe.isBeforeInsertion());
Assert.assertFalse(pe.isAfterInsertion());
Assert.assertEquals(pe.getEventBases(), "A");
}
// Case 2: same check for a ten-base pure insertion ("10I").
SAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10);
read2.setReadBases(Utils.dupBytes((byte) 'A', 10));
read2.setBaseQualities(Utils.dupBytes((byte) '@', 10));
read2.setCigarString("10I");
reads = Arrays.asList(read2);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(reads, createTestReadProperties());
while(li.hasNext()) {
AlignmentContext alignmentContext = li.next();
ReadBackedPileup p = alignmentContext.getBasePileup();
Assert.assertTrue(p.getNumberOfElements() == 1);
PileupElement pe = p.iterator().next();
Assert.assertTrue(pe.isBeforeInsertion());
Assert.assertFalse(pe.isAfterInsertion());
Assert.assertEquals(pe.getEventBases(), "AAAAAAAAAA");
}
}
////////////////////////////////////////////
// comprehensive LIBS/PileupElement tests //
////////////////////////////////////////////
// Parameter holder for the comprehensive LIBS tests: a cigar string and the read
// length it implies.
private static class LIBSTest {
final String cigar;
final int readLength;
private LIBSTest(final String cigar, final int readLength) {
this.cigar = cigar;
this.readLength = readLength;
}
}
// Supplies (cigar, readLength) cases for testLIBS. Several cases remain commented
// out pending LIBS fixes (see TODOs below).
@DataProvider(name = "LIBSTest")
public Object[][] createLIBSTestData() {
//TODO -- when LIBS is fixed this should be replaced to provide all possible permutations of CIGAR strings
return new Object[][]{
{new LIBSTest("1I", 1)},
{new LIBSTest("10I", 10)},
{new LIBSTest("2M2I2M", 6)},
{new LIBSTest("2M2I", 4)},
//TODO -- uncomment these when LIBS is fixed
//{new LIBSTest("2I2M", 4, Arrays.asList(2,3), Arrays.asList(IS_AFTER_INSERTION_FLAG,0))},
//{new LIBSTest("1I1M1D1M", 3, Arrays.asList(0,1), Arrays.asList(IS_AFTER_INSERTION_FLAG | IS_BEFORE_DELETION_START_FLAG | IS_BEFORE_DELETED_BASE_FLAG,IS_AFTER_DELETED_BASE_FLAG | IS_AFTER_DELETION_END_FLAG))},
//{new LIBSTest("1S1I1M", 3, Arrays.asList(2), Arrays.asList(IS_AFTER_INSERTION_FLAG))},
//{new LIBSTest("1M2D2M", 3)},
{new LIBSTest("1S1M", 2)},
{new LIBSTest("1M1S", 2)},
{new LIBSTest("1S1M1I", 3)}
};
}
// Cross-checks every per-base flag reported by LIBS pileup elements against the
// independent LIBS_position reference implementation below, for one cigar case.
@Test(dataProvider = "LIBSTest")
public void testLIBS(LIBSTest params) {
final int locus = 44367788;
SAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, params.readLength);
read.setReadBases(Utils.dupBytes((byte) 'A', params.readLength));
read.setBaseQualities(Utils.dupBytes((byte) '@', params.readLength));
read.setCigarString(params.cigar);
// create the iterator by state with the fake reads and fake records
li = makeLTBS(Arrays.asList(read), createTestReadProperties());
final LIBS_position tester = new LIBS_position(read);
while ( li.hasNext() ) {
AlignmentContext alignmentContext = li.next();
ReadBackedPileup p = alignmentContext.getBasePileup();
Assert.assertTrue(p.getNumberOfElements() == 1);
PileupElement pe = p.iterator().next();
// Advance the reference tracker one genomic position and compare all flags.
tester.stepForwardOnGenome();
Assert.assertEquals(pe.isBeforeDeletedBase(), tester.isBeforeDeletedBase);
Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart);
Assert.assertEquals(pe.isAfterDeletedBase(), tester.isAfterDeletedBase);
Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd);
Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion);
Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion);
Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip);
Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset());
}
}
////////////////////////////////////////////////
// End comprehensive LIBS/PileupElement tests //
////////////////////////////////////////////////
///////////////////////////////////////
// Read State Manager Tests //
///////////////////////////////////////
// Round-trip test for PerSampleReadStateManager: reads are grouped into "stacks"
// by alignment start, pushed into the manager, iterated back out (optionally
// removing every Nth state), and the survivors are validated on a second pass.
private class PerSampleReadStateManagerTest extends TestDataProvider {
private List<Integer> readCountsPerAlignmentStart;
private List<SAMRecord> reads;
private List<ArrayList<LocusIteratorByState.SAMRecordState>> recordStatesByAlignmentStart;
private int removalInterval;
// removalInterval == 0 means "remove nothing"; otherwise every Nth state is removed.
public PerSampleReadStateManagerTest( List<Integer> readCountsPerAlignmentStart, int removalInterval ) {
super(PerSampleReadStateManagerTest.class);
this.readCountsPerAlignmentStart = readCountsPerAlignmentStart;
this.removalInterval = removalInterval;
reads = new ArrayList<SAMRecord>();
recordStatesByAlignmentStart = new ArrayList<ArrayList<LocusIteratorByState.SAMRecordState>>();
setName(String.format("%s: readCountsPerAlignmentStart: %s removalInterval: %d",
getClass().getSimpleName(), readCountsPerAlignmentStart, removalInterval));
}
public void run() {
LocusIteratorByState libs = makeLTBS(new ArrayList<SAMRecord>(), createTestReadProperties());
LocusIteratorByState.ReadStateManager readStateManager =
libs.new ReadStateManager(new ArrayList<SAMRecord>().iterator());
LocusIteratorByState.ReadStateManager.PerSampleReadStateManager perSampleReadStateManager =
readStateManager.new PerSampleReadStateManager();
makeReads();
for ( ArrayList<LocusIteratorByState.SAMRecordState> stackRecordStates : recordStatesByAlignmentStart ) {
perSampleReadStateManager.addStatesAtNextAlignmentStart(stackRecordStates);
}
// read state manager should have the right number of reads
Assert.assertEquals(reads.size(), perSampleReadStateManager.size());
Iterator<SAMRecord> originalReadsIterator = reads.iterator();
Iterator<LocusIteratorByState.SAMRecordState> recordStateIterator = perSampleReadStateManager.iterator();
int recordStateCount = 0;
int numReadStatesRemoved = 0;
// Do a first-pass validation of the record state iteration by making sure we get back everything we
// put in, in the same order, doing any requested removals of read states along the way
while ( recordStateIterator.hasNext() ) {
LocusIteratorByState.SAMRecordState readState = recordStateIterator.next();
recordStateCount++;
SAMRecord readFromPerSampleReadStateManager = readState.getRead();
Assert.assertTrue(originalReadsIterator.hasNext());
SAMRecord originalRead = originalReadsIterator.next();
// The read we get back should be literally the same read in memory as we put in
Assert.assertTrue(originalRead == readFromPerSampleReadStateManager);
// If requested, remove a read state every removalInterval states
if ( removalInterval > 0 && recordStateCount % removalInterval == 0 ) {
recordStateIterator.remove();
numReadStatesRemoved++;
}
}
Assert.assertFalse(originalReadsIterator.hasNext());
// If we removed any read states, do a second pass through the read states to make sure the right
// states were removed
if ( numReadStatesRemoved > 0 ) {
Assert.assertEquals(perSampleReadStateManager.size(), reads.size() - numReadStatesRemoved);
originalReadsIterator = reads.iterator();
recordStateIterator = perSampleReadStateManager.iterator();
int readCount = 0;
int readStateCount = 0;
// Match record states with the reads that should remain after removal
while ( recordStateIterator.hasNext() ) {
LocusIteratorByState.SAMRecordState readState = recordStateIterator.next();
readStateCount++;
SAMRecord readFromPerSampleReadStateManager = readState.getRead();
Assert.assertTrue(originalReadsIterator.hasNext());
SAMRecord originalRead = originalReadsIterator.next();
readCount++;
if ( readCount % removalInterval == 0 ) {
originalRead = originalReadsIterator.next(); // advance to next read, since the previous one should have been discarded
readCount++;
}
// The read we get back should be literally the same read in memory as we put in (after accounting for removals)
Assert.assertTrue(originalRead == readFromPerSampleReadStateManager);
}
Assert.assertEquals(readStateCount, reads.size() - numReadStatesRemoved);
}
// Allow memory used by this test to be reclaimed
readCountsPerAlignmentStart = null;
reads = null;
recordStatesByAlignmentStart = null;
}
// Builds one stack of identical artificial reads per requested alignment start,
// recording both the reads and their wrapped SAMRecordStates.
private void makeReads() {
int alignmentStart = 1;
for ( int readsThisStack : readCountsPerAlignmentStart ) {
ArrayList<SAMRecord> stackReads = new ArrayList<SAMRecord>(ArtificialSAMUtils.createStackOfIdenticalArtificialReads(readsThisStack, header, "foo", 0, alignmentStart, MathUtils.randomIntegerInRange(50, 100)));
ArrayList<LocusIteratorByState.SAMRecordState> stackRecordStates = new ArrayList<LocusIteratorByState.SAMRecordState>();
for ( SAMRecord read : stackReads ) {
stackRecordStates.add(new LocusIteratorByState.SAMRecordState(read));
}
reads.addAll(stackReads);
recordStatesByAlignmentStart.add(stackRecordStates);
}
}
}
// Builds the cross product of stack-size configurations and removal intervals
// (0 = no removal) as PerSampleReadStateManagerTest instances.
@DataProvider(name = "PerSampleReadStateManagerTestDataProvider")
public Object[][] createPerSampleReadStateManagerTests() {
for ( List<Integer> thisTestReadStateCounts : Arrays.asList( Arrays.asList(1),
Arrays.asList(2),
Arrays.asList(10),
Arrays.asList(1, 1),
Arrays.asList(2, 2),
Arrays.asList(10, 10),
Arrays.asList(1, 10),
Arrays.asList(10, 1),
Arrays.asList(1, 1, 1),
Arrays.asList(2, 2, 2),
Arrays.asList(10, 10, 10),
Arrays.asList(1, 1, 1, 1, 1, 1),
Arrays.asList(10, 10, 10, 10, 10, 10),
Arrays.asList(1, 2, 10, 1, 2, 10)
) ) {
for ( int removalInterval : Arrays.asList(0, 2, 3) ) {
new PerSampleReadStateManagerTest(thisTestReadStateCounts, removalInterval);
}
}
return PerSampleReadStateManagerTest.getTests(PerSampleReadStateManagerTest.class);
}
// Executes one pre-built PerSampleReadStateManagerTest scenario.
@Test(dataProvider = "PerSampleReadStateManagerTestDataProvider")
public void runPerSampleReadStateManagerTest( PerSampleReadStateManagerTest test ) {
logger.warn("Running test: " + test);
test.run();
}
///////////////////////////////////////
// End Read State Manager Tests //
///////////////////////////////////////
///////////////////////////////////////
// Helper methods / classes //
///////////////////////////////////////
// Convenience overload: test ReadProperties with no downsampling.
private static ReadProperties createTestReadProperties() {
return createTestReadProperties(null);
}
// Builds a minimal ReadProperties for tests: empty readers/filters/transformers,
// coordinate sort order, strict validation, and the given downsampling method
// (null = none). Argument order must match the ReadProperties constructor.
private static ReadProperties createTestReadProperties( DownsamplingMethod downsamplingMethod ) {
return new ReadProperties(
Collections.<SAMReaderID>emptyList(),
new SAMFileHeader(),
SAMFileHeader.SortOrder.coordinate,
false,
SAMFileReader.ValidationStringency.STRICT,
downsamplingMethod,
new ValidationExclusion(),
Collections.<ReadFilter>emptyList(),
Collections.<ReadTransformer>emptyList(),
false,
(byte) -1
);
}
// Adapts a plain Iterator to samtools' CloseableIterator; close() is a no-op and
// remove() is deliberately unsupported.
private static class FakeCloseableIterator<T> implements CloseableIterator<T> {
Iterator<T> iterator;
public FakeCloseableIterator(Iterator<T> it) {
iterator = it;
}
@Override
public void close() {}
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public T next() {
return iterator.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException("Don't remove!");
}
}
// Independent reference implementation of LocusIteratorByState's per-base
// position/flag tracking, used by testLIBS to validate PileupElement flags.
private static final class LIBS_position {
SAMRecord read;
final int numOperators;
int currentOperatorIndex = 0;
int currentPositionOnOperator = 0;
int currentReadOffset = 0;
boolean isBeforeDeletionStart = false;
boolean isBeforeDeletedBase = false;
boolean isAfterDeletionEnd = false;
boolean isAfterDeletedBase = false;
boolean isBeforeInsertion = false;
boolean isAfterInsertion = false;
boolean isNextToSoftClip = false;
// Whether a match-type operator (M/EQ/X) has been consumed yet; leading
// insertions are handled specially until then.
boolean sawMop = false;
public LIBS_position(final SAMRecord read) {
this.read = read;
numOperators = read.getCigar().numCigarElements();
}
// Read offset of the base last stepped onto (offsets are advanced before use,
// hence the -1; clamped at 0 for leading events).
public int getCurrentReadOffset() {
return Math.max(0, currentReadOffset - 1);
}
/**
* Steps forward on the genome. Returns false when done reading the read, true otherwise.
*/
public boolean stepForwardOnGenome() {
if ( currentOperatorIndex == numOperators )
return false;
CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex);
if ( currentPositionOnOperator >= curElement.getLength() ) {
if ( ++currentOperatorIndex == numOperators )
return false;
curElement = read.getCigar().getCigarElement(currentOperatorIndex);
currentPositionOnOperator = 0;
}
switch ( curElement.getOperator() ) {
case I: // insertion w.r.t. the reference
// NOTE: deliberate fall-through — a leading insertion (no M yet) is
// skipped like a soft clip; after an M, insertions stop here.
if ( !sawMop )
break;
case S: // soft clip
// NOTE: deliberate fall-through into H/P: clip bases consume read
// offset, then the whole operator is skipped via recursion.
currentReadOffset += curElement.getLength();
case H: // hard clip
case P: // padding
currentOperatorIndex++;
return stepForwardOnGenome();
case D: // deletion w.r.t. the reference
case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
currentPositionOnOperator++;
break;
case M:
case EQ:
case X:
sawMop = true;
currentReadOffset++;
currentPositionOnOperator++;
break;
default:
throw new IllegalStateException("No support for cigar op: " + curElement.getOperator());
}
final boolean isFirstOp = currentOperatorIndex == 0;
final boolean isLastOp = currentOperatorIndex == numOperators - 1;
final boolean isFirstBaseOfOp = currentPositionOnOperator == 1;
final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength();
// Recompute all adjacency flags for the position just stepped onto.
isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp);
isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D);
isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp);
isAfterDeletedBase = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D);
isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp)
|| (!sawMop && curElement.getOperator() == CigarOperator.I);
isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp);
isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp)
|| isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp);
return true;
}
// True when the next cigar operator exists, is of type op, and we sit on the
// last base of the current operator.
private static boolean isBeforeOp(final Cigar cigar,
final int currentOperatorIndex,
final CigarOperator op,
final boolean isLastOp,
final boolean isLastBaseOfOp) {
return !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op;
}
// True when the previous cigar operator exists, is of type op, and we sit on the
// first base of the current operator.
private static boolean isAfterOp(final Cigar cigar,
final int currentOperatorIndex,
final CigarOperator op,
final boolean isFirstOp,
final boolean isFirstBaseOfOp) {
return !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op;
}
}
}

View File

@ -0,0 +1,104 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.traversals;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocSortedSet;
import org.broadinstitute.sting.utils.activeregion.ActiveRegion;
import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState;
import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult;
import java.util.*;
/**
* ActiveRegionWalker for unit testing
*
* User: depristo
* Date: 1/15/13
* Time: 1:28 PM
*/
// Minimal ActiveRegionWalker for traversal unit tests: isActive() returns a fixed
// probability (optionally restricted to a set of intervals) and map() records
// every active region it receives for later inspection by the test.
class DummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
// Activity probability returned for loci considered active.
private final double prob;
// Read states the walker requests; defaults to the superclass's set.
private EnumSet<ActiveRegionReadState> states = super.desiredReadStates();
// When non-null, only loci overlapping these intervals report prob; others 0.0.
private GenomeLocSortedSet activeRegions = null;
// Every locus isActive() was called on, in call order.
protected List<GenomeLoc> isActiveCalls = new ArrayList<GenomeLoc>();
// Active regions passed to map(), keyed by location (insertion-ordered).
protected Map<GenomeLoc, ActiveRegion> mappedActiveRegions = new LinkedHashMap<GenomeLoc, ActiveRegion>();
public DummyActiveRegionWalker() {
this(1.0);
}
public DummyActiveRegionWalker(double constProb) {
this.prob = constProb;
}
public DummyActiveRegionWalker(EnumSet<ActiveRegionReadState> wantStates) {
this(1.0);
this.states = wantStates;
}
public DummyActiveRegionWalker(GenomeLocSortedSet activeRegions) {
this(1.0);
this.activeRegions = activeRegions;
}
public void setStates(EnumSet<ActiveRegionReadState> states) {
this.states = states;
}
@Override
public EnumSet<ActiveRegionReadState> desiredReadStates() {
return states;
}
@Override
public ActivityProfileResult isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
isActiveCalls.add(ref.getLocus());
final double p = activeRegions == null || activeRegions.overlaps(ref.getLocus()) ? prob : 0.0;
return new ActivityProfileResult(ref.getLocus(), p);
}
@Override
public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
mappedActiveRegions.put(activeRegion.getLocation(), activeRegion);
return 0;
}
@Override
public Integer reduceInit() {
return 0;
}
@Override
public Integer reduce(Integer value, Integer sum) {
return 0;
}
}

View File

@ -1,59 +1,58 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.traversals;
import com.google.java.contract.PreconditionError;
import net.sf.samtools.*;
import org.broadinstitute.sting.commandline.Tags;
import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.datasources.reads.*;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.gatk.iterators.ReadTransformer;
import org.broadinstitute.sting.gatk.resourcemanagement.ThreadAllocation;
import org.broadinstitute.sting.gatk.walkers.Walker;
import org.broadinstitute.sting.utils.GenomeLocSortedSet;
import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.activeregion.ActiveRegionReadState;
import org.broadinstitute.sting.utils.interval.IntervalMergingRule;
import org.broadinstitute.sting.utils.interval.IntervalUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.*;
import net.sf.picard.reference.IndexedFastaSequenceFile;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.gatk.GenomeAnalysisEngine;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.datasources.providers.LocusShardDataProvider;
import org.broadinstitute.sting.gatk.datasources.rmd.ReferenceOrderedDataSource;
import org.broadinstitute.sting.gatk.executive.WindowMaker;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.walkers.ActiveRegionWalker;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.activeregion.ActiveRegion;
import org.broadinstitute.sting.utils.activeregion.ActivityProfileResult;
import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.ReadUtils;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
@ -71,57 +70,16 @@ import java.util.*;
* http://iwww.broadinstitute.org/gsa/wiki/index.php/Active_Region_Traversal_Contract
*/
public class TraverseActiveRegionsUnitTest extends BaseTest {
private final static boolean ENFORCE_CONTRACTS = false;
private final static boolean DEBUG = false;
private class DummyActiveRegionWalker extends ActiveRegionWalker<Integer, Integer> {
private final double prob;
private EnumSet<ActiveRegionReadState> states = super.desiredReadStates();
protected List<GenomeLoc> isActiveCalls = new ArrayList<GenomeLoc>();
protected Map<GenomeLoc, ActiveRegion> mappedActiveRegions = new HashMap<GenomeLoc, ActiveRegion>();
public DummyActiveRegionWalker() {
this.prob = 1.0;
}
public DummyActiveRegionWalker(double constProb) {
this.prob = constProb;
}
public DummyActiveRegionWalker(EnumSet<ActiveRegionReadState> wantStates) {
this.prob = 1.0;
this.states = wantStates;
}
@Override
public EnumSet<ActiveRegionReadState> desiredReadStates() {
return states;
}
@Override
public ActivityProfileResult isActive(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
isActiveCalls.add(ref.getLocus());
return new ActivityProfileResult(ref.getLocus(), prob);
}
@Override
public Integer map(ActiveRegion activeRegion, RefMetaDataTracker metaDataTracker) {
mappedActiveRegions.put(activeRegion.getLocation(), activeRegion);
return 0;
}
@Override
public Integer reduceInit() {
return 0;
}
@Override
public Integer reduce(Integer value, Integer sum) {
return 0;
}
@DataProvider(name = "TraversalEngineProvider")
public Object[][] makeTraversals() {
final List<Object[]> traversals = new LinkedList<Object[]>();
traversals.add(new Object[]{new TraverseActiveRegions<Integer, Integer>()});
return traversals.toArray(new Object[][]{});
}
private final TraverseActiveRegions<Integer, Integer> t = new TraverseActiveRegions<Integer, Integer>();
private IndexedFastaSequenceFile reference;
private SAMSequenceDictionary dictionary;
private GenomeLocParser genomeLocParser;
@ -133,6 +91,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
@BeforeClass
private void init() throws FileNotFoundException {
//reference = new CachingIndexedFastaSequenceFile(new File("/Users/depristo/Desktop/broadLocal/localData/human_g1k_v37.fasta")); // hg19Reference));
reference = new CachingIndexedFastaSequenceFile(new File(hg19Reference));
dictionary = reference.getSequenceDictionary();
genomeLocParser = new GenomeLocParser(dictionary);
@ -187,18 +146,18 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
out.close();
}
@Test
public void testAllBasesSeen() {
@Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
public void testAllBasesSeen(TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
List<GenomeLoc> activeIntervals = getIsActiveIntervals(walker, intervals);
List<GenomeLoc> activeIntervals = getIsActiveIntervals(t, walker, intervals);
// Contract: Every genome position in the analysis interval(s) is processed by the walker's isActive() call
verifyEqualIntervals(intervals, activeIntervals);
}
private List<GenomeLoc> getIsActiveIntervals(DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
private List<GenomeLoc> getIsActiveIntervals(final TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
List<GenomeLoc> activeIntervals = new ArrayList<GenomeLoc>();
for (LocusShardDataProvider dataProvider : createDataProviders(intervals, testBAM)) {
for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, testBAM)) {
t.traverse(walker, dataProvider, 0);
activeIntervals.addAll(walker.isActiveCalls);
}
@ -206,23 +165,23 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
return activeIntervals;
}
@Test (expectedExceptions = PreconditionError.class)
public void testIsActiveRangeLow () {
@Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class)
public void testIsActiveRangeLow (TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker(-0.1);
getActiveRegions(walker, intervals).values();
getActiveRegions(t, walker, intervals).values();
}
@Test (expectedExceptions = PreconditionError.class)
public void testIsActiveRangeHigh () {
@Test (enabled = ENFORCE_CONTRACTS, dataProvider = "TraversalEngineProvider", expectedExceptions = PreconditionError.class)
public void testIsActiveRangeHigh (TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker(1.1);
getActiveRegions(walker, intervals).values();
getActiveRegions(t, walker, intervals).values();
}
@Test
public void testActiveRegionCoverage() {
@Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
public void testActiveRegionCoverage(TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
Collection<ActiveRegion> activeRegions = getActiveRegions(walker, intervals).values();
Collection<ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals).values();
verifyActiveRegionCoverage(intervals, activeRegions);
}
@ -268,11 +227,11 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
Assert.assertEquals(intervalStops.size(), 0, "Interval stop location does not match an active region stop location");
}
@Test
public void testActiveRegionExtensionOnContig() {
@Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
public void testActiveRegionExtensionOnContig(TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
Collection<ActiveRegion> activeRegions = getActiveRegions(walker, intervals).values();
Collection<ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals).values();
for (ActiveRegion activeRegion : activeRegions) {
GenomeLoc loc = activeRegion.getExtendedLoc();
@ -283,8 +242,8 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
}
}
@Test
public void testPrimaryReadMapping() {
@Test(enabled = true, dataProvider = "TraversalEngineProvider")
public void testPrimaryReadMapping(TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker();
// Contract: Each read has the Primary state in a single region (or none)
@ -293,41 +252,41 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
// simple: Primary in 1:1-999
// overlap_equal: Primary in 1:1-999
// overlap_unequal: Primary in 1:1-999
// boundary_equal: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
// boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// boundary_1_post: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
// extended_and_np: Non-Primary in 1:1-999, Primary in 1:1000-1999, Extended in 1:2000-2999
// boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999
// outside_intervals: none
// shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
// shard_boundary_1_post: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
// shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
// shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
// shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
// simple20: Primary in 20:10000-10100
Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(walker, intervals);
Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
ActiveRegion region;
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal");
verifyReadMapping(region, "simple", "overlap_equal", "overlap_unequal", "extended_and_np");
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1000, 1999));
verifyReadMapping(region, "boundary_unequal", "extended_and_np", "boundary_1_pre");
verifyReadMapping(region, "boundary_unequal", "boundary_1_pre", "boundary_equal", "boundary_1_post");
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 2000, 2999));
verifyReadMapping(region, "boundary_equal", "boundary_1_post");
verifyReadMapping(region);
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 14908, 16384));
verifyReadMapping(region, "shard_boundary_1_pre");
verifyReadMapping(region, "shard_boundary_1_pre", "shard_boundary_1_post", "shard_boundary_equal");
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 16385, 16927));
verifyReadMapping(region, "shard_boundary_1_post", "shard_boundary_equal");
verifyReadMapping(region);
region = activeRegions.get(genomeLocParser.createGenomeLoc("20", 10000, 10100));
verifyReadMapping(region, "simple20");
}
@Test
public void testNonPrimaryReadMapping() {
@Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
public void testNonPrimaryReadMapping(TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker(
EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY));
@ -339,18 +298,18 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
// simple: Primary in 1:1-999
// overlap_equal: Primary in 1:1-999
// overlap_unequal: Primary in 1:1-999
// boundary_equal: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
// boundary_equal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// boundary_unequal: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// boundary_1_pre: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// boundary_1_post: Non-Primary in 1:1000-1999, Primary in 1:2000-2999
// extended_and_np: Non-Primary in 1:1-999, Primary in 1:1000-1999, Extended in 1:2000-2999
// boundary_1_post: Primary in 1:1000-1999, Non-Primary in 1:2000-2999
// extended_and_np: Primary in 1:1-999, Non-Primary in 1:1000-1999, Extended in 1:2000-2999
// outside_intervals: none
// shard_boundary_1_pre: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
// shard_boundary_1_post: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
// shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
// shard_boundary_1_post: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
// shard_boundary_equal: Primary in 1:14908-16384, Non-Primary in 1:16385-16927
// simple20: Primary in 20:10000-10100
Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(walker, intervals);
Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
ActiveRegion region;
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
@ -372,8 +331,8 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
verifyReadMapping(region, "simple20");
}
@Test
public void testExtendedReadMapping() {
@Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
public void testExtendedReadMapping(TraverseActiveRegions t) {
DummyActiveRegionWalker walker = new DummyActiveRegionWalker(
EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED));
@ -397,7 +356,7 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
// shard_boundary_equal: Non-Primary in 1:14908-16384, Primary in 1:16385-16927
// simple20: Primary in 20:10000-10100
Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(walker, intervals);
Map<GenomeLoc, ActiveRegion> activeRegions = getActiveRegions(t, walker, intervals);
ActiveRegion region;
region = activeRegions.get(genomeLocParser.createGenomeLoc("1", 1, 999));
@ -419,24 +378,34 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
verifyReadMapping(region, "simple20");
}
@Test
public void testUnmappedReads() {
@Test(enabled = true && ! DEBUG, dataProvider = "TraversalEngineProvider")
public void testUnmappedReads(TraverseActiveRegions t) {
// TODO
}
private void verifyReadMapping(ActiveRegion region, String... reads) {
final Set<String> regionReads = new HashSet<String>();
for (SAMRecord read : region.getReads()) {
Assert.assertFalse(regionReads.contains(read.getReadName()), "Duplicate reads detected in region " + region + " read " + read.getReadName());
regionReads.add(read.getReadName());
}
Collection<String> wantReads = new ArrayList<String>(Arrays.asList(reads));
for (SAMRecord read : region.getReads()) {
String regionReadName = read.getReadName();
Assert.assertTrue(wantReads.contains(regionReadName), "Read " + regionReadName + " assigned to active region " + region);
Assert.assertTrue(wantReads.contains(regionReadName), "Read " + regionReadName + " incorrectly assigned to active region " + region);
wantReads.remove(regionReadName);
}
Assert.assertTrue(wantReads.isEmpty(), "Reads missing in active region " + region);
Assert.assertTrue(wantReads.isEmpty(), "Reads missing in active region " + region + ", wanted " + (wantReads.isEmpty() ? "" : wantReads.iterator().next()));
}
private Map<GenomeLoc, ActiveRegion> getActiveRegions(DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
for (LocusShardDataProvider dataProvider : createDataProviders(intervals, testBAM))
private Map<GenomeLoc, ActiveRegion> getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals) {
return getActiveRegions(t, walker, intervals, testBAM);
}
private Map<GenomeLoc, ActiveRegion> getActiveRegions(TraverseActiveRegions t, DummyActiveRegionWalker walker, List<GenomeLoc> intervals, final String bam) {
for (LocusShardDataProvider dataProvider : createDataProviders(t, walker, intervals, bam))
t.traverse(walker, dataProvider, 0);
t.endTraversal(walker, 0);
@ -496,28 +465,223 @@ public class TraverseActiveRegionsUnitTest extends BaseTest {
record.setCigar(cigar);
record.setReadString(new String(new char[len]).replace("\0", "A"));
record.setBaseQualities(new byte[len]);
record.setReadGroup(new GATKSAMReadGroupRecord(header.getReadGroup("test")));
return record;
}
private List<LocusShardDataProvider> createDataProviders(List<GenomeLoc> intervals, String bamFile) {
private List<LocusShardDataProvider> createDataProviders(TraverseActiveRegions traverseActiveRegions, final Walker walker, List<GenomeLoc> intervals, String bamFile) {
GenomeAnalysisEngine engine = new GenomeAnalysisEngine();
engine.setGenomeLocParser(genomeLocParser);
t.initialize(engine);
traverseActiveRegions.initialize(engine, walker);
Collection<SAMReaderID> samFiles = new ArrayList<SAMReaderID>();
SAMReaderID readerID = new SAMReaderID(new File(bamFile), new Tags());
samFiles.add(readerID);
SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser);
SAMDataSource dataSource = new SAMDataSource(samFiles, new ThreadAllocation(), null, genomeLocParser,
false,
SAMFileReader.ValidationStringency.STRICT,
null,
null,
new ValidationExclusion(),
new ArrayList<ReadFilter>(),
new ArrayList<ReadTransformer>(),
false, (byte)30, false, true);
final Set<String> samples = SampleUtils.getSAMFileSamples(dataSource.getHeader());
List<LocusShardDataProvider> providers = new ArrayList<LocusShardDataProvider>();
for (Shard shard : dataSource.createShardIteratorOverIntervals(new GenomeLocSortedSet(genomeLocParser, intervals), new LocusShardBalancer())) {
for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs())) {
for (WindowMaker.WindowMakerIterator window : new WindowMaker(shard, genomeLocParser, dataSource.seek(shard), shard.getGenomeLocs(), samples)) {
providers.add(new LocusShardDataProvider(shard, shard.getReadProperties(), genomeLocParser, window.getLocus(), window, reference, new ArrayList<ReferenceOrderedDataSource>()));
}
}
return providers;
}
// ---------------------------------------------------------------------------------------------------------
//
// Combinatorial tests to ensure reads are going into the right regions
//
// ---------------------------------------------------------------------------------------------------------
@DataProvider(name = "CombinatorialARTTilingProvider")
public Object[][] makeCombinatorialARTTilingProvider() {
final List<Object[]> tests = new LinkedList<Object[]>();
final List<Integer> starts = Arrays.asList(
1, // very start of the chromosome
ArtificialBAMBuilder.BAM_SHARD_SIZE - 100, // right before the shard boundary
ArtificialBAMBuilder.BAM_SHARD_SIZE + 100 // right after the shard boundary
);
final List<EnumSet<ActiveRegionReadState>> allReadStates = Arrays.asList(
EnumSet.of(ActiveRegionReadState.PRIMARY),
EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY),
EnumSet.of(ActiveRegionReadState.PRIMARY, ActiveRegionReadState.NONPRIMARY, ActiveRegionReadState.EXTENDED)
);
final int maxTests = Integer.MAX_VALUE;
int nTests = 0;
for ( final int readLength : Arrays.asList(10, 100) ) {
for ( final int skips : Arrays.asList(0, 1, 10) ) {
for ( final int start : starts ) {
for ( final int nReadsPerLocus : Arrays.asList(1, 2) ) {
for ( final int nLoci : Arrays.asList(1, 1000) ) {
for ( EnumSet<ActiveRegionReadState> readStates : allReadStates ) {
final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci);
bamBuilder.setReadLength(readLength);
bamBuilder.setSkipNLoci(skips);
bamBuilder.setAlignmentStart(start);
for ( final GenomeLocSortedSet activeRegions : enumerateActiveRegions(bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())) {
nTests++;
if ( nTests < maxTests ) // && nTests == 1238 )
tests.add(new Object[]{nTests, activeRegions, readStates, bamBuilder});
}
}
}
}
}
}
}
return tests.toArray(new Object[][]{});
}
private Collection<GenomeLocSortedSet> enumerateActiveRegions(final int start, final int stop) {
// should basically cut up entire region into equal sized chunks, of
// size 10, 20, 50, 100, etc, alternating skipping pieces so they are inactive
// Need to make sure we include some edge cases:
final List<GenomeLocSortedSet> activeRegions = new LinkedList<GenomeLocSortedSet>();
for ( final int stepSize : Arrays.asList(11, 29, 53, 97) ) {
for ( final boolean startWithActive : Arrays.asList(true, false) ) {
activeRegions.add(makeActiveRegionMask(start, stop, stepSize, startWithActive));
}
}
// active region is the whole interval
activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start, stop)));
// active region extends up to the end of the data, but doesn't include start
activeRegions.add(new GenomeLocSortedSet(genomeLocParser, genomeLocParser.createGenomeLoc("1", start+10, stop)));
return activeRegions;
}
private GenomeLocSortedSet makeActiveRegionMask(final int start, final int stop, final int stepSize, final boolean startWithActive) {
final GenomeLocSortedSet active = new GenomeLocSortedSet(genomeLocParser);
boolean includeRegion = startWithActive;
for ( int left = start; left < stop; left += stepSize) {
final int right = left + stepSize;
final GenomeLoc region = genomeLocParser.createGenomeLoc("1", left, right);
if ( includeRegion )
active.add(region);
includeRegion = ! includeRegion;
}
return active;
}
@Test(enabled = true && ! DEBUG, dataProvider = "CombinatorialARTTilingProvider")
public void testARTReadsInActiveRegions(final int id, final GenomeLocSortedSet activeRegions, final EnumSet<ActiveRegionReadState> readStates, final ArtificialBAMBuilder bamBuilder) {
logger.warn("Running testARTReadsInActiveRegions id=" + id + " locs " + activeRegions + " against bam " + bamBuilder);
final List<GenomeLoc> intervals = Arrays.asList(
genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())
);
final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions);
walker.setStates(readStates);
final TraverseActiveRegions traversal = new TraverseActiveRegions<Integer, Integer>();
final Map<GenomeLoc, ActiveRegion> activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile().toString());
final Set<String> alreadySeenReads = new HashSet<String>(); // for use with the primary / non-primary
for ( final ActiveRegion region : activeRegionsMap.values() ) {
final Set<String> readNamesInRegion = readNamesInRegion(region);
int nReadsExpectedInRegion = 0;
for ( final GATKSAMRecord read : bamBuilder.makeReads() ) {
final GenomeLoc readLoc = genomeLocParser.createGenomeLoc(read);
boolean shouldBeInRegion = readStates.contains(ActiveRegionReadState.EXTENDED)
? region.getExtendedLoc().overlapsP(readLoc)
: region.getLocation().overlapsP(readLoc);
if ( ! readStates.contains(ActiveRegionReadState.NONPRIMARY) ) {
if ( alreadySeenReads.contains(read.getReadName()) )
shouldBeInRegion = false;
else if ( shouldBeInRegion )
alreadySeenReads.add(read.getReadName());
}
Assert.assertEquals(readNamesInRegion.contains(read.getReadName()), shouldBeInRegion, "Region " + region +
" failed contains read check: read " + read + " with span " + readLoc + " should be in region is " + shouldBeInRegion + " but I got the opposite");
nReadsExpectedInRegion += shouldBeInRegion ? 1 : 0;
}
Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected");
}
}
private Set<String> readNamesInRegion(final ActiveRegion region) {
final Set<String> readNames = new LinkedHashSet<String>(region.getReads().size());
for ( final SAMRecord read : region.getReads() )
readNames.add(read.getReadName());
return readNames;
}
// ---------------------------------------------------------------------------------------------------------
//
// Make sure all insertion reads are properly included in the active regions
//
// ---------------------------------------------------------------------------------------------------------
@Test
public void ensureAllInsertionReadsAreInActiveRegions() {
final int readLength = 10;
final int start = 20;
final int nReadsPerLocus = 10;
final int nLoci = 3;
final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(reference, nReadsPerLocus, nLoci);
bamBuilder.setReadLength(readLength);
bamBuilder.setAlignmentStart(start);
// note that the position must be +1 as the read's all I cigar puts the end 1 bp before start, leaving it out of the region
GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(bamBuilder.getHeader(),"allI",0,start+1,readLength);
allI.setCigarString(readLength + "I");
allI.setReadGroup(new GATKSAMReadGroupRecord(bamBuilder.getHeader().getReadGroups().get(0)));
bamBuilder.addReads(allI);
final GenomeLocSortedSet activeRegions = new GenomeLocSortedSet(bamBuilder.getGenomeLocParser());
activeRegions.add(bamBuilder.getGenomeLocParser().createGenomeLoc("1", 10, 30));
final List<GenomeLoc> intervals = Arrays.asList(
genomeLocParser.createGenomeLoc("1", bamBuilder.getAlignmentStart(), bamBuilder.getAlignmentEnd())
);
final DummyActiveRegionWalker walker = new DummyActiveRegionWalker(activeRegions);
final TraverseActiveRegions traversal = new TraverseActiveRegions<Integer, Integer>();
final Map<GenomeLoc, ActiveRegion> activeRegionsMap = getActiveRegions(traversal, walker, intervals, bamBuilder.makeTemporarilyBAMFile().toString());
final ActiveRegion region = activeRegionsMap.values().iterator().next();
int nReadsExpectedInRegion = 0;
final Set<String> readNamesInRegion = readNamesInRegion(region);
for ( final GATKSAMRecord read : bamBuilder.makeReads() ) {
Assert.assertTrue(readNamesInRegion.contains(read.getReadName()),
"Region " + region + " should contain read " + read + " with cigar " + read.getCigarString() + " but it wasn't");
nReadsExpectedInRegion++;
}
Assert.assertEquals(region.size(), nReadsExpectedInRegion, "There are more reads in active region " + region + "than expected");
}
}

View File

@ -68,7 +68,7 @@ public class TraverseDuplicatesUnitTest extends BaseTest {
engine.setReferenceDataSource(refFile);
engine.setGenomeLocParser(genomeLocParser);
obj.initialize(engine);
obj.initialize(engine, null);
}
@Test

View File

@ -132,7 +132,7 @@ public class TraverseReadsUnitTest extends BaseTest {
countReadWalker = new CountReads();
traversalEngine = new TraverseReadsNano(1);
traversalEngine.initialize(engine);
traversalEngine.initialize(engine, countReadWalker);
}
/** Test out that we can shard the file and iterate over every read */

View File

@ -32,8 +32,10 @@ package org.broadinstitute.sting.utils.fasta;
import net.sf.picard.reference.IndexedFastaSequenceFile;
import net.sf.picard.reference.ReferenceSequence;
import net.sf.samtools.SAMSequenceRecord;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Priority;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
@ -49,7 +51,7 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* Basic unit test for GenomeLoc
* Basic unit test for CachingIndexedFastaSequenceFile
*/
public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
private File simpleFasta = new File(publicTestDir + "/exampleFASTA.fasta");
@ -80,7 +82,7 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
@Test(dataProvider = "fastas", enabled = true && ! DEBUG)
public void testCachingIndexedFastaReaderSequential1(File fasta, int cacheSize, int querySize) throws FileNotFoundException {
final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true);
final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
SAMSequenceRecord contig = caching.getSequenceDictionary().getSequence(0);
logger.warn(String.format("Checking contig %s length %d with cache size %d and query size %d",
@ -122,7 +124,7 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
@Test(dataProvider = "fastas", enabled = true && ! DEBUG)
public void testCachingIndexedFastaReaderTwoStage(File fasta, int cacheSize, int querySize) throws FileNotFoundException {
final IndexedFastaSequenceFile uncached = new IndexedFastaSequenceFile(fasta);
final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true);
final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
SAMSequenceRecord contig = uncached.getSequenceDictionary().getSequence(0);
@ -167,7 +169,7 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
@Test(dataProvider = "ParallelFastaTest", enabled = true && ! DEBUG, timeOut = 60000)
public void testCachingIndexedFastaReaderParallel(final File fasta, final int cacheSize, final int querySize, final int nt) throws FileNotFoundException, InterruptedException {
final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true);
final CachingIndexedFastaSequenceFile caching = new CachingIndexedFastaSequenceFile(fasta, getCacheSize(cacheSize), true, false);
logger.warn(String.format("Parallel caching index fasta reader test cacheSize %d querySize %d nt %d", caching.getCacheSize(), querySize, nt));
for ( int iterations = 0; iterations < 1; iterations++ ) {
@ -230,4 +232,33 @@ public class CachingIndexedFastaSequenceFileUnitTest extends BaseTest {
else
return new String(reader.getSubsequenceAt(contig, start, stop).getBases());
}
@Test(enabled = true)
public void testIupacChanges() throws FileNotFoundException, InterruptedException {
final String testFasta = privateTestDir + "iupacFASTA.fasta";
final CachingIndexedFastaSequenceFile iupacPreserving = new CachingIndexedFastaSequenceFile(new File(testFasta), CachingIndexedFastaSequenceFile.DEFAULT_CACHE_SIZE, false, true);
final CachingIndexedFastaSequenceFile makeNs = new CachingIndexedFastaSequenceFile(new File(testFasta));
int preservingNs = 0;
int changingNs = 0;
for ( SAMSequenceRecord contig : iupacPreserving.getSequenceDictionary().getSequences() ) {
final String sPreserving = fetchBaseString(iupacPreserving, contig.getSequenceName(), 0, 15000);
preservingNs += StringUtils.countMatches(sPreserving, "N");
final String sChanging = fetchBaseString(makeNs, contig.getSequenceName(), 0, 15000);
changingNs += StringUtils.countMatches(sChanging, "N");
}
Assert.assertEquals(changingNs, preservingNs + 4);
}
@Test(enabled = true, expectedExceptions = {UserException.class})
public void testFailOnBadBase() throws FileNotFoundException, InterruptedException {
final String testFasta = privateTestDir + "problematicFASTA.fasta";
final CachingIndexedFastaSequenceFile fasta = new CachingIndexedFastaSequenceFile(new File(testFasta));
for ( SAMSequenceRecord contig : fasta.getSequenceDictionary().getSequences() ) {
fetchBaseString(fasta, contig.getSequenceName(), -1, -1);
}
}
}

View File

@ -27,14 +27,14 @@ package org.broadinstitute.sting.utils.fragments;
import com.google.caliper.Param;
import com.google.caliper.SimpleBenchmark;
import com.google.caliper.runner.CaliperMain;
import net.sf.samtools.SAMFileHeader;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import java.util.*;
import java.util.ArrayList;
import java.util.List;
/**
* Caliper microbenchmark of fragment pileup
@ -76,6 +76,6 @@ public class FragmentUtilsBenchmark extends SimpleBenchmark {
}
public static void main(String[] args) {
CaliperMain.main(FragmentUtilsBenchmark.class, args);
com.google.caliper.Runner.main(FragmentUtilsBenchmark.class, args);
}
}

View File

@ -1068,7 +1068,7 @@ public class IntervalUtilsUnitTest extends BaseTest {
List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
intervalArgs.add(new IntervalBinding<Feature>(picardIntervalFile.getAbsolutePath()));
IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding, genomeLocParser);
IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalArguments.intervalSetRule, argCollection.intervalArguments.intervalMerging, argCollection.intervalArguments.intervalPadding, genomeLocParser);
}
@Test(expectedExceptions=UserException.class, dataProvider="invalidIntervalTestData")
@ -1081,7 +1081,7 @@ public class IntervalUtilsUnitTest extends BaseTest {
List<IntervalBinding<Feature>> intervalArgs = new ArrayList<IntervalBinding<Feature>>(1);
intervalArgs.add(new IntervalBinding<Feature>(gatkIntervalFile.getAbsolutePath()));
IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalSetRule, argCollection.intervalMerging, argCollection.intervalPadding, genomeLocParser);
IntervalUtils.loadIntervals(intervalArgs, argCollection.intervalArguments.intervalSetRule, argCollection.intervalArguments.intervalMerging, argCollection.intervalArguments.intervalPadding, genomeLocParser);
}
private File createTempFile( String tempFilePrefix, String tempFileExtension, String... lines ) throws Exception {

View File

@ -0,0 +1,110 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.Arrays;
/**
 * Unit tests of the new (non-legacy) AlignmentStateMachine: steps the machine across
 * artificial reads with many cigar shapes and cross-checks every position against the
 * independent LIBS_position reference implementation.
 */
public class AlignmentStateMachineUnitTest extends LocusIteratorByStateBaseTest {
    /**
     * Provides LIBSTest cases: every valid cigar permutation built from element
     * lengths {1, 2} combined into strings of 1 to 4 elements.
     */
    @DataProvider(name = "AlignmentStateMachineTest")
    public Object[][] makeAlignmentStateMachineTest() {
        return createLIBSTests(
                Arrays.asList(1, 2),
                Arrays.asList(1, 2, 3, 4));
    }

    /**
     * Walks an AlignmentStateMachine over the test read and asserts, at each aligned
     * position, that its read offset, genome offset, and cigar bookkeeping agree with
     * the LIBS_position tester, and that edge state before/after iteration is correct.
     */
    @Test(dataProvider = "AlignmentStateMachineTest")
    public void testAlignmentStateMachineTest(LIBSTest params) {
        final GATKSAMRecord read = params.makeRead();
        final AlignmentStateMachine state = new AlignmentStateMachine(read);
        final LIBS_position tester = new LIBS_position(read);

        // min is one because always visit something, even for 10I reads
        final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1;

        Assert.assertSame(state.getRead(), read);
        Assert.assertNotNull(state.toString());

        int bpVisited = 0;
        int lastOffset = -1;

        // Before the first step the machine must report the left-edge state:
        // no current cigar element and all offsets at their -1 sentinels.
        // TODO -- more tests about test state machine state before first step?
        Assert.assertTrue(state.isLeftEdge());
        Assert.assertNull(state.getCigarOperator());
        Assert.assertNotNull(state.toString());
        Assert.assertEquals(state.getReadOffset(), -1);
        Assert.assertEquals(state.getGenomeOffset(), -1);
        Assert.assertEquals(state.getCurrentCigarElementOffset(), -1);
        Assert.assertEquals(state.getCurrentCigarElement(), null);

        while ( state.stepForwardOnGenome() != null ) {
            Assert.assertNotNull(state.toString());

            // advance the reference implementation in lock-step
            tester.stepForwardOnGenome();

            Assert.assertTrue(state.getReadOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + state.getReadOffset());
            Assert.assertEquals(state.getReadOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);

            Assert.assertFalse(state.isLeftEdge());

            Assert.assertEquals(state.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
            Assert.assertEquals(state.getOffsetIntoCurrentCigarElement(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");
            Assert.assertEquals(read.getCigar().getCigarElement(state.getCurrentCigarElementOffset()), state.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");

            Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() >= 0, "Offset into current cigar too small");
            Assert.assertTrue(state.getOffsetIntoCurrentCigarElement() < state.getCurrentCigarElement().getLength(), "Offset into current cigar too big");

            Assert.assertEquals(state.getGenomeOffset(), tester.getCurrentGenomeOffsetBase0(), "Offset from alignment start is bad");
            Assert.assertEquals(state.getGenomePosition(), tester.getCurrentGenomeOffsetBase0() + read.getAlignmentStart(), "GenomePosition start is bad");
            Assert.assertEquals(state.getLocation(genomeLocParser).size(), 1, "GenomeLoc position should have size == 1");
            Assert.assertEquals(state.getLocation(genomeLocParser).getStart(), state.getGenomePosition(), "GenomeLoc position is bad");

            // most tests of this functionality are in LIBS
            Assert.assertNotNull(state.makePileupElement());

            lastOffset = state.getReadOffset();
            bpVisited++;
        }

        // After exhaustion the machine must sit just past the read's end.
        Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
        Assert.assertEquals(state.getReadOffset(), read.getReadLength());
        Assert.assertEquals(state.getCurrentCigarElementOffset(), read.getCigarLength());
        Assert.assertEquals(state.getCurrentCigarElement(), null);
        Assert.assertNotNull(state.toString());
    }
}

View File

@ -0,0 +1,155 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
/**
 * Reference implementation of per-base alignment-state tracking, used by the unit tests
 * to independently verify LocusIteratorByState / AlignmentStateMachine behavior.
 *
 * Walks a read's cigar one aligned genomic position at a time via
 * {@link #stepForwardOnGenome()}, maintaining read/genome offsets plus public flags
 * describing adjacency to deletions, insertions, and soft clips.
 */
public final class LIBS_position {
    // the read whose cigar is being walked
    SAMRecord read;

    final int numOperators;
    // index of the cigar element currently being consumed
    int currentOperatorIndex = 0;
    // 1-based position within the current cigar element (0 == not yet started)
    int currentPositionOnOperator = 0;
    // number of read bases consumed so far
    int currentReadOffset = 0;
    // number of genomic bases consumed so far
    int currentGenomeOffset = 0;

    // adjacency flags, refreshed after every successful step
    public boolean isBeforeDeletionStart = false;
    public boolean isBeforeDeletedBase = false;
    public boolean isAfterDeletionEnd = false;
    public boolean isAfterDeletedBase = false;
    public boolean isBeforeInsertion = false;
    public boolean isAfterInsertion = false;
    public boolean isNextToSoftClip = false;

    // true once any M/EQ/X element has been consumed
    boolean sawMop = false;

    public LIBS_position(final SAMRecord read) {
        this.read = read;
        numOperators = read.getCigar().numCigarElements();
    }

    // 0-based read offset of the current base (clamped to 0 before the first step)
    public int getCurrentReadOffset() {
        return Math.max(0, currentReadOffset - 1);
    }

    // 0-based offset within the current cigar element
    public int getCurrentPositionOnOperatorBase0() {
        return currentPositionOnOperator - 1;
    }

    // 0-based offset from the alignment start
    public int getCurrentGenomeOffsetBase0() {
        return currentGenomeOffset - 1;
    }

    /**
     * Steps forward on the genome. Returns false when done reading the read, true otherwise.
     */
    public boolean stepForwardOnGenome() {
        if ( currentOperatorIndex == numOperators )
            return false;

        CigarElement curElement = read.getCigar().getCigarElement(currentOperatorIndex);
        if ( currentPositionOnOperator >= curElement.getLength() ) {
            // current element exhausted; advance to the next one
            if ( ++currentOperatorIndex == numOperators )
                return false;

            curElement = read.getCigar().getCigarElement(currentOperatorIndex);
            currentPositionOnOperator = 0;
        }

        switch ( curElement.getOperator() ) {
            case I: // insertion w.r.t. the reference
//                if ( !sawMop )
//                    break;
                // falls through: I and S consume read bases but no genomic position
            case S: // soft clip
                currentReadOffset += curElement.getLength();
                // falls through: skip the whole element and recurse to the next one
            case H: // hard clip
            case P: // padding
                currentOperatorIndex++;
                return stepForwardOnGenome();

            case D: // deletion w.r.t. the reference
            case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
                currentPositionOnOperator++;
                currentGenomeOffset++;
                break;

            case M:
            case EQ:
            case X:
                sawMop = true;
                currentReadOffset++;
                currentPositionOnOperator++;
                currentGenomeOffset++;
                break;
            default:
                throw new IllegalStateException("No support for cigar op: " + curElement.getOperator());
        }

        // recompute adjacency flags for the position just reached
        final boolean isFirstOp = currentOperatorIndex == 0;
        final boolean isLastOp = currentOperatorIndex == numOperators - 1;
        final boolean isFirstBaseOfOp = currentPositionOnOperator == 1;
        final boolean isLastBaseOfOp = currentPositionOnOperator == curElement.getLength();

        isBeforeDeletionStart = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isLastOp, isLastBaseOfOp);
        isBeforeDeletedBase = isBeforeDeletionStart || (!isLastBaseOfOp && curElement.getOperator() == CigarOperator.D);
        isAfterDeletionEnd = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.D, isFirstOp, isFirstBaseOfOp);
        isAfterDeletedBase  = isAfterDeletionEnd || (!isFirstBaseOfOp && curElement.getOperator() == CigarOperator.D);
        isBeforeInsertion = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isLastOp, isLastBaseOfOp)
                || (!sawMop && curElement.getOperator() == CigarOperator.I);
        isAfterInsertion = isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.I, isFirstOp, isFirstBaseOfOp);
        isNextToSoftClip = isBeforeOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isLastOp, isLastBaseOfOp)
                || isAfterOp(read.getCigar(), currentOperatorIndex, CigarOperator.S, isFirstOp, isFirstBaseOfOp);

        return true;
    }

    // true iff we are on the last base of the current element and the NEXT element is op
    private static boolean isBeforeOp(final Cigar cigar,
                                      final int currentOperatorIndex,
                                      final CigarOperator op,
                                      final boolean isLastOp,
                                      final boolean isLastBaseOfOp) {
        return !isLastOp && isLastBaseOfOp && cigar.getCigarElement(currentOperatorIndex+1).getOperator() == op;
    }

    // true iff we are on the first base of the current element and the PREVIOUS element is op
    private static boolean isAfterOp(final Cigar cigar,
                                     final int currentOperatorIndex,
                                     final CigarOperator op,
                                     final boolean isFirstOp,
                                     final boolean isFirstBaseOfOp) {
        return !isFirstOp && isFirstBaseOfOp && cigar.getCigarElement(currentOperatorIndex-1).getOperator() == op;
    }
}

View File

@ -0,0 +1,143 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import com.google.caliper.Param;
import com.google.caliper.SimpleBenchmark;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMRecord;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import java.util.LinkedList;
import java.util.List;
/**
 * Caliper microbenchmark of locus iteration: times the new LocusIteratorByState pileup
 * machinery and the bare AlignmentStateMachine over synthetic reads, parameterized by
 * cigar shape (pure match, mid-read insertion, mid-read deletion).
 */
public class LocusIteratorBenchmark extends SimpleBenchmark {
    protected SAMFileHeader header;
    protected GenomeLocParser genomeLocParser;

    // nReads identical artificial reads, all starting at the same locus
    List<GATKSAMRecord> reads = new LinkedList<GATKSAMRecord>();
    final int readLength = 101;
    final int nReads = 10000;
    final int locus = 1;

    @Param({"101M", "50M10I40M", "50M10D40M"})
    String cigar; // set automatically by framework

    /** Builds the artificial header and the shared list of all-A reads with the current cigar. */
    @Override protected void setUp() {
        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());

        for ( int j = 0; j < nReads; j++ ) {
            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
            final byte[] quals = new byte[readLength];
            for ( int i = 0; i < readLength; i++ )
                quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE);
            read.setBaseQualities(quals);
            read.setCigarString(cigar);
            reads.add(read);
        }
    }

    /** Times full pileup construction via the new LocusIteratorByState. */
    public void timeNewLIBS(int rep) {
        for ( int i = 0; i < rep; i++ ) {
            final org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState libs =
                    new org.broadinstitute.sting.utils.locusiterator.LocusIteratorByState(
                            new LocusIteratorByStateBaseTest.FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
                            LocusIteratorByStateBaseTest.createTestReadProperties(),
                            genomeLocParser,
                            LocusIteratorByState.sampleListForSAMWithoutReadGroups());

            while ( libs.hasNext() ) {
                AlignmentContext context = libs.next();
            }
        }
    }

    /** Times stepping the bare AlignmentStateMachine across every read, with no pileup work. */
    public void timeAlignmentStateMachine(int rep) {
        for ( int i = 0; i < rep; i++ ) {
            for ( final GATKSAMRecord read : reads ) {
                final AlignmentStateMachine alignmentStateMachine = new AlignmentStateMachine(read);
                while ( alignmentStateMachine.stepForwardOnGenome() != null ) {
                    ; // the stepping itself is the work being measured
                }
            }
        }
    }

    public static void main(String[] args) {
        com.google.caliper.Runner.main(LocusIteratorBenchmark.class, args);
    }
}

View File

@ -0,0 +1,252 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import net.sf.samtools.*;
import net.sf.samtools.util.CloseableIterator;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.arguments.ValidationExclusion;
import org.broadinstitute.sting.gatk.datasources.reads.SAMReaderID;
import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod;
import org.broadinstitute.sting.gatk.filters.ReadFilter;
import org.broadinstitute.sting.gatk.iterators.ReadTransformer;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import java.util.*;
/**
 * Base harness for tests of the new (non-legacy) LocusIteratorByState: supplies a shared
 * artificial SAM header/parser, factories for ReadProperties and the iterator under test,
 * and a cigar-permutation data provider used by subclasses.
 */
public class LocusIteratorByStateBaseTest extends BaseTest {
    protected static SAMFileHeader header;
    protected GenomeLocParser genomeLocParser;

    @BeforeClass
    public void beforeClass() {
        // single-contig, 1000bp artificial genome shared by all tests
        header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
    }

    /**
     * Builds a LocusIteratorByState over the given reads, treating the whole input as a
     * single sample without read groups.
     */
    protected LocusIteratorByState makeLTBS(List<GATKSAMRecord> reads,
                                            ReadProperties readAttributes) {
        return new LocusIteratorByState(new FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
                readAttributes,
                genomeLocParser,
                LocusIteratorByState.sampleListForSAMWithoutReadGroups());
    }

    /** ReadProperties with no downsampling and reads not retained. */
    public static ReadProperties createTestReadProperties() {
        return createTestReadProperties(null, false);
    }

    /**
     * Builds minimal ReadProperties for testing.
     *
     * @param downsamplingMethod downsampling to apply, or null for none
     * @param keepReads whether the iterator should retain reads for later transfer
     */
    public static ReadProperties createTestReadProperties( DownsamplingMethod downsamplingMethod, final boolean keepReads ) {
        return new ReadProperties(
                Collections.<SAMReaderID>emptyList(),
                new SAMFileHeader(),
                SAMFileHeader.SortOrder.coordinate,
                false,
                SAMFileReader.ValidationStringency.STRICT,
                downsamplingMethod,
                new ValidationExclusion(),
                Collections.<ReadFilter>emptyList(),
                Collections.<ReadTransformer>emptyList(),
                true,
                (byte) -1,
                keepReads);
    }

    /** Minimal CloseableIterator wrapper around a plain Iterator; close() is a no-op. */
    public static class FakeCloseableIterator<T> implements CloseableIterator<T> {
        Iterator<T> iterator;

        public FakeCloseableIterator(Iterator<T> it) {
            iterator = it;
        }

        @Override
        public void close() {}

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public T next() {
            return iterator.next();
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("Don't remove!");
        }
    }

    /** A single LIBS test case: a cigar string plus a factory for a matching artificial read. */
    protected static class LIBSTest {
        public static final int locus = 44367788;

        final String cigarString;
        final int readLength;
        final private List<CigarElement> elements;

        public LIBSTest(final String cigarString) {
            final Cigar cigar = TextCigarCodec.getSingleton().decode(cigarString);
            this.cigarString = cigarString;
            this.elements = cigar.getCigarElements();
            this.readLength = cigar.getReadLength();
        }

        @Override
        public String toString() {
            return "LIBSTest{" +
                    "cigar='" + cigarString + '\'' +
                    ", readLength=" + readLength +
                    '}';
        }

        public List<CigarElement> getElements() {
            return elements;
        }

        /** Creates an all-A artificial read at {@link #locus} with this test's cigar. */
        public GATKSAMRecord makeRead() {
            GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, locus, readLength);
            read.setReadBases(Utils.dupBytes((byte) 'A', readLength));
            final byte[] quals = new byte[readLength];
            for ( int i = 0; i < readLength; i++ )
                quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE);
            read.setBaseQualities(quals);
            read.setCigarString(cigarString);
            return read;
        }
    }

    // true if the element is an insertion or deletion
    private boolean isIndel(final CigarElement ce) {
        return ce.getOperator() == CigarOperator.D || ce.getOperator() == CigarOperator.I;
    }

    // true if the first alignment-consuming element of the cigar is a deletion
    private boolean startsWithDeletion(final List<CigarElement> elements) {
        for ( final CigarElement element : elements ) {
            switch ( element.getOperator() ) {
                case M:
                case I:
                case EQ:
                case X:
                    return false;
                case D:
                    return true;
                default:
                    // keep looking
            }
        }

        return false;
    }

    /**
     * Turns a candidate list of cigar elements into a LIBSTest, or returns null when the
     * combination is not a legal/supported cigar (leading/trailing deletion, N elements,
     * adjacent same-operator or indel-indel pairs, or a lone non-I/S matchless element).
     */
    private LIBSTest makePermutationTest(final List<CigarElement> elements) {
        CigarElement last = null;
        boolean hasMatch = false;

        // starts with D => bad
        if ( startsWithDeletion(elements) )
            return null;

        // ends with D => bad
        if ( elements.get(elements.size()-1).getOperator() == CigarOperator.D )
            return null;

        // make sure it's valid
        String cigar = "";
        int len = 0;
        for ( final CigarElement ce : elements ) {
            if ( ce.getOperator() == CigarOperator.N )
                return null; // TODO -- don't support N

            // abort on a bad cigar
            if ( last != null ) {
                if ( ce.getOperator() == last.getOperator() )
                    return null;
                if ( isIndel(ce) && isIndel(last) )
                    return null;
            }

            cigar += ce.getLength() + ce.getOperator().toString();
            len += ce.getLength();
            last = ce;
            hasMatch = hasMatch || ce.getOperator() == CigarOperator.M;
        }

        if ( ! hasMatch && elements.size() == 1 &&
                ! (last.getOperator() == CigarOperator.I || last.getOperator() == CigarOperator.S))
            return null;

        return new LIBSTest(cigar);
    }

    /**
     * Data provider: generates all valid LIBSTests built by permuting single cigar
     * elements of each requested length across each requested combination size.
     */
    @DataProvider(name = "LIBSTest")
    public Object[][] createLIBSTests(final List<Integer> cigarLengths, final List<Integer> combinations) {
        final List<Object[]> tests = new LinkedList<Object[]>();

        final List<CigarOperator> allOps = Arrays.asList(CigarOperator.values());

        final List<CigarElement> singleCigars = new LinkedList<CigarElement>();
        for ( final int len : cigarLengths )
            for ( final CigarOperator op : allOps )
                singleCigars.add(new CigarElement(len, op));

        for ( final int complexity : combinations ) {
            for ( final List<CigarElement> elements : Utils.makePermutations(singleCigars, complexity, true) ) {
                final LIBSTest test = makePermutationTest(elements);
                if ( test != null ) tests.add(new Object[]{test});
            }
        }

        return tests.toArray(new Object[][]{});
    }

    /**
     * Work around inadequate tests that aren't worth fixing.
     *
     * Look at the CIGAR 2M2P2D2P2M. Both M states border a deletion, separated by P (padding elements). So
     * the right answer for deletions here is true for isBeforeDeletion() and isAfterDeletion() for the first
     * and second M. But the LIBS_position doesn't say so.
     *
     * @param elements cigar elements of the read under test
     * @param elementI index of the element whose neighbors are checked
     * @return true if either adjacent element is padding-like (P/H/S)
     */
    protected static boolean hasNeighboringPaddedOps(final List<CigarElement> elements, final int elementI) {
        return (elementI - 1 >= 0 && isPadding(elements.get(elementI-1))) ||
                (elementI + 1 < elements.size() && isPadding(elements.get(elementI+1)));
    }

    // P, H, and S all count as "padding" for the workaround above
    private static boolean isPadding(final CigarElement elt) {
        return elt.getOperator() == CigarOperator.P || elt.getOperator() == CigarOperator.H || elt.getOperator() == CigarOperator.S;
    }
}

View File

@ -0,0 +1,689 @@
/*
* Copyright (c) 2012 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.utils.locusiterator;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMFileHeader;
import net.sf.samtools.SAMReadGroupRecord;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.downsampling.DownsampleType;
import org.broadinstitute.sting.gatk.downsampling.DownsamplingMethod;
import org.broadinstitute.sting.utils.NGSPlatform;
import org.broadinstitute.sting.utils.QualityUtils;
import org.broadinstitute.sting.utils.Utils;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileup;
import org.broadinstitute.sting.utils.sam.ArtificialBAMBuilder;
import org.broadinstitute.sting.utils.sam.ArtificialSAMUtils;
import org.broadinstitute.sting.utils.sam.GATKSAMReadGroupRecord;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.*;
/**
* testing of the new (non-legacy) version of LocusIteratorByState
*/
public class LocusIteratorByStateUnitTest extends LocusIteratorByStateBaseTest {
private static final boolean DEBUG = false;
protected LocusIteratorByState li;
/**
 * Verifies that unmapped reads and all-insertion (e.g. 10I) reads contribute nothing to
 * the pileup depth, yet are still retained and returned in original order by
 * transferReadsFromAllPreviousPileups() when keepReads is enabled.
 */
@Test(enabled = true)
public void testUnmappedAndAllIReadsPassThrough() {
    final int readLength = 10;
    GATKSAMRecord mapped1 = ArtificialSAMUtils.createArtificialRead(header,"mapped1",0,1,readLength);
    GATKSAMRecord mapped2 = ArtificialSAMUtils.createArtificialRead(header,"mapped2",0,1,readLength);
    GATKSAMRecord unmapped = ArtificialSAMUtils.createArtificialRead(header,"unmapped",0,1,readLength);
    GATKSAMRecord allI = ArtificialSAMUtils.createArtificialRead(header,"allI",0,1,readLength);

    unmapped.setReadUnmappedFlag(true);
    unmapped.setCigarString("*");
    allI.setCigarString(readLength + "I");

    List<GATKSAMRecord> reads = Arrays.asList(mapped1, unmapped, allI, mapped2);

    // create the iterator by state with the fake reads and fake records
    // keepReads == true so the reads can be transferred back afterwards
    li = makeLTBS(reads,createTestReadProperties(DownsamplingMethod.NONE, true));

    Assert.assertTrue(li.hasNext());
    AlignmentContext context = li.next();
    ReadBackedPileup pileup = context.getBasePileup();
    Assert.assertEquals(pileup.depthOfCoverage(), 2, "Should see only 2 reads in pileup, even with unmapped and all I reads");

    final List<GATKSAMRecord> rawReads = li.transferReadsFromAllPreviousPileups();
    Assert.assertEquals(rawReads, reads, "Input and transferred read lists should be the same, and include the unmapped and all I reads");
}
/**
 * Verifies that reads using the explicit match (=) and mismatch (X) cigar operators
 * pile up identically to equivalent plain-M reads: all four reads cover the same ten
 * positions, so every pileup must have depth 4.
 */
@Test(enabled = true && ! DEBUG)
public void testXandEQOperators() {
    final byte[] bases1 = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
    final byte[] bases2 = new byte[] {'A','A','A','C','A','A','A','A','A','C'};

    // create a test version of the Reads object
    ReadProperties readAttributes = createTestReadProperties();

    GATKSAMRecord r1 = ArtificialSAMUtils.createArtificialRead(header,"r1",0,1,10);
    r1.setReadBases(bases1);
    r1.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r1.setCigarString("10M");

    GATKSAMRecord r2 = ArtificialSAMUtils.createArtificialRead(header,"r2",0,1,10);
    r2.setReadBases(bases2);
    // bugfix: quality array must match the 10 read bases (was 12 entries)
    r2.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r2.setCigarString("3=1X5=1X");

    GATKSAMRecord r3 = ArtificialSAMUtils.createArtificialRead(header,"r3",0,1,10);
    r3.setReadBases(bases2);
    // bugfix: quality array must match the 10 read bases (was 12 entries)
    r3.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r3.setCigarString("3=1X5M1X");

    GATKSAMRecord r4 = ArtificialSAMUtils.createArtificialRead(header,"r4",0,1,10);
    r4.setReadBases(bases2);
    r4.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    r4.setCigarString("10M");

    List<GATKSAMRecord> reads = Arrays.asList(r1, r2, r3, r4);

    // create the iterator by state with the fake reads and fake records
    li = makeLTBS(reads,readAttributes);

    while (li.hasNext()) {
        AlignmentContext context = li.next();
        ReadBackedPileup pileup = context.getBasePileup();
        Assert.assertEquals(pileup.depthOfCoverage(), 4);
    }
}
/**
 * Verifies that a read with an internal 2bp insertion (4M2I6M) is flagged in the regular
 * base pileup: some pileup element must report isBeforeInsertion() with the correct
 * inserted length (2) and bases ("CT").
 */
@Test(enabled = true && ! DEBUG)
public void testIndelsInRegularPileup() {
    final byte[] bases = new byte[] {'A','A','A','A','A','A','A','A','A','A'};
    final byte[] indelBases = new byte[] {'A','A','A','A','C','T','A','A','A','A','A','A'};

    // create a test version of the Reads object
    ReadProperties readAttributes = createTestReadProperties();

    GATKSAMRecord before = ArtificialSAMUtils.createArtificialRead(header,"before",0,1,10);
    before.setReadBases(bases);
    before.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    before.setCigarString("10M");

    // "during" carries the 2bp insertion (the C,T at read offsets 4-5)
    GATKSAMRecord during = ArtificialSAMUtils.createArtificialRead(header,"during",0,2,10);
    during.setReadBases(indelBases);
    during.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20,20,20});
    during.setCigarString("4M2I6M");

    GATKSAMRecord after = ArtificialSAMUtils.createArtificialRead(header,"after",0,3,10);
    after.setReadBases(bases);
    after.setBaseQualities(new byte[] {20,20,20,20,20,20,20,20,20,20});
    after.setCigarString("10M");

    List<GATKSAMRecord> reads = Arrays.asList(before, during, after);

    // create the iterator by state with the fake reads and fake records
    li = makeLTBS(reads,readAttributes);

    boolean foundIndel = false;
    while (li.hasNext()) {
        AlignmentContext context = li.next();
        ReadBackedPileup pileup = context.getBasePileup().getBaseFilteredPileup(10);
        for (PileupElement p : pileup) {
            if (p.isBeforeInsertion()) {
                foundIndel = true;
                Assert.assertEquals(p.getLengthOfImmediatelyFollowingIndel(), 2, "Wrong event length");
                Assert.assertEquals(p.getBasesOfImmediatelyFollowingInsertion(), "CT", "Inserted bases are incorrect");
                break;
            }
        }
    }

    Assert.assertTrue(foundIndel,"Indel in pileup not found");
}
/**
 * Checks the historical "bleed-through" behavior: a read that is entirely an insertion
 * (76I) should still appear in the base pileup at the following locus.
 *
 * NOTE(review): currently disabled (enabled = false) — the expectations here may not
 * match the new iterator's handling of whole-indel reads; confirm before re-enabling.
 */
@Test(enabled = false && ! DEBUG)
public void testWholeIndelReadInIsolation() {
    final int firstLocus = 44367789;

    // create a test version of the Reads object
    ReadProperties readAttributes = createTestReadProperties();

    GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header, "indelOnly", 0, firstLocus, 76);
    indelOnlyRead.setReadBases(Utils.dupBytes((byte)'A',76));
    indelOnlyRead.setBaseQualities(Utils.dupBytes((byte) '@', 76));
    indelOnlyRead.setCigarString("76I");

    List<GATKSAMRecord> reads = Arrays.asList(indelOnlyRead);

    // create the iterator by state with the fake reads and fake records
    li = makeLTBS(reads, readAttributes);

    // Traditionally, reads that end with indels bleed into the pileup at the following locus. Verify that the next pileup contains this read
    // and considers it to be an indel-containing read.
    Assert.assertTrue(li.hasNext(),"Should have found a whole-indel read in the normal base pileup without extended events enabled");
    AlignmentContext alignmentContext = li.next();
    Assert.assertEquals(alignmentContext.getLocation().getStart(), firstLocus, "Base pileup is at incorrect location.");
    ReadBackedPileup basePileup = alignmentContext.getBasePileup();
    Assert.assertEquals(basePileup.getReads().size(),1,"Pileup is of incorrect size");
    Assert.assertSame(basePileup.getReads().get(0), indelOnlyRead, "Read in pileup is incorrect");
}
/**
 * Test to make sure that reads supporting only an indel (example cigar string: 76I) do
 * not negatively influence the ordering of the pileup.
 *
 * Three reads are used: one ending in a long insertion (1M75I) at firstLocus, one that
 * is pure insertion (76I) at secondLocus, and one starting with a long insertion
 * (75I1M) at secondLocus. Exactly two pileups are expected, each containing only the
 * read whose single M base lands on that locus.
 */
@Test(enabled = true && ! DEBUG)
public void testWholeIndelRead() {
    final int firstLocus = 44367788, secondLocus = firstLocus + 1;

    GATKSAMRecord leadingRead = ArtificialSAMUtils.createArtificialRead(header,"leading",0,firstLocus,76);
    leadingRead.setReadBases(Utils.dupBytes((byte)'A',76));
    leadingRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
    leadingRead.setCigarString("1M75I");

    GATKSAMRecord indelOnlyRead = ArtificialSAMUtils.createArtificialRead(header,"indelOnly",0,secondLocus,76);
    indelOnlyRead.setReadBases(Utils.dupBytes((byte) 'A', 76));
    indelOnlyRead.setBaseQualities(Utils.dupBytes((byte)'@',76));
    indelOnlyRead.setCigarString("76I");

    GATKSAMRecord fullMatchAfterIndel = ArtificialSAMUtils.createArtificialRead(header,"fullMatch",0,secondLocus,76);
    fullMatchAfterIndel.setReadBases(Utils.dupBytes((byte)'A',76));
    fullMatchAfterIndel.setBaseQualities(Utils.dupBytes((byte)'@',76));
    fullMatchAfterIndel.setCigarString("75I1M");

    List<GATKSAMRecord> reads = Arrays.asList(leadingRead, indelOnlyRead, fullMatchAfterIndel);

    // create the iterator by state with the fake reads and fake records
    li = makeLTBS(reads, createTestReadProperties());
    int currentLocus = firstLocus;
    int numAlignmentContextsFound = 0;

    while(li.hasNext()) {
        AlignmentContext alignmentContext = li.next();
        Assert.assertEquals(alignmentContext.getLocation().getStart(),currentLocus,"Current locus returned by alignment context is incorrect");

        if(currentLocus == firstLocus) {
            // only the 1M75I read has an aligned base here
            List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
            Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
            Assert.assertSame(readsAtLocus.get(0),leadingRead,"leadingRead absent from pileup at locus " + currentLocus);
        }
        else if(currentLocus == secondLocus) {
            // only the 75I1M read has an aligned base here; the pure-76I read never does
            List<GATKSAMRecord> readsAtLocus = alignmentContext.getBasePileup().getReads();
            Assert.assertEquals(readsAtLocus.size(),1,"Wrong number of reads at locus " + currentLocus);
            Assert.assertSame(readsAtLocus.get(0),fullMatchAfterIndel,"fullMatchAfterIndel absent from pileup at locus " + currentLocus);
        }

        currentLocus++;
        numAlignmentContextsFound++;
    }

    Assert.assertEquals(numAlignmentContextsFound, 2, "Found incorrect number of alignment contexts");
}
    /**
     * Test to make sure that reads supporting only an indel (example cigar string: 76I) are represented properly
     */
    // NOTE(review): disabled (enabled = false) — the meaningful assertions below are commented out
    // pending the "TODO -- fix tests" items; only the pileup-size check currently runs.
    @Test(enabled = false && ! DEBUG)
    public void testWholeIndelReadRepresentedTest() {
        final int firstLocus = 44367788, secondLocus = firstLocus + 1;

        // case 1: a single-base read that is entirely a 1bp insertion
        GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header,"read1",0,secondLocus,1);
        read1.setReadBases(Utils.dupBytes((byte) 'A', 1));
        read1.setBaseQualities(Utils.dupBytes((byte) '@', 1));
        read1.setCigarString("1I");

        List<GATKSAMRecord> reads = Arrays.asList(read1);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads, createTestReadProperties());

        while(li.hasNext()) {
            AlignmentContext alignmentContext = li.next();
            ReadBackedPileup p = alignmentContext.getBasePileup();
            // the insertion-only read should contribute exactly one pileup element
            Assert.assertTrue(p.getNumberOfElements() == 1);
            // TODO -- fix tests
//            PileupElement pe = p.iterator().next();
//            Assert.assertTrue(pe.isBeforeInsertion());
//            Assert.assertFalse(pe.isAfterInsertion());
//            Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "A");
        }

        // case 2: same scenario with a longer (10bp) insertion-only read
        GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header,"read2",0,secondLocus,10);
        read2.setReadBases(Utils.dupBytes((byte) 'A', 10));
        read2.setBaseQualities(Utils.dupBytes((byte) '@', 10));
        read2.setCigarString("10I");

        reads = Arrays.asList(read2);

        // create the iterator by state with the fake reads and fake records
        li = makeLTBS(reads, createTestReadProperties());

        while(li.hasNext()) {
            AlignmentContext alignmentContext = li.next();
            ReadBackedPileup p = alignmentContext.getBasePileup();
            Assert.assertTrue(p.getNumberOfElements() == 1);
            // TODO -- fix tests
//            PileupElement pe = p.iterator().next();
//            Assert.assertTrue(pe.isBeforeInsertion());
//            Assert.assertFalse(pe.isAfterInsertion());
//            Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), "AAAAAAAAAA");
        }
    }
/////////////////////////////////////////////
// get event length and bases calculations //
/////////////////////////////////////////////
@DataProvider(name = "IndelLengthAndBasesTest")
public Object[][] makeIndelLengthAndBasesTest() {
final String EVENT_BASES = "ACGTACGTACGT";
final List<Object[]> tests = new LinkedList<Object[]>();
for ( int eventSize = 1; eventSize < 10; eventSize++ ) {
for ( final CigarOperator indel : Arrays.asList(CigarOperator.D, CigarOperator.I) ) {
final String cigar = String.format("2M%d%s1M", eventSize, indel.toString());
final String eventBases = indel == CigarOperator.D ? "" : EVENT_BASES.substring(0, eventSize);
final int readLength = 3 + eventBases.length();
GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, "read", 0, 1, readLength);
read.setReadBases(("TT" + eventBases + "A").getBytes());
final byte[] quals = new byte[readLength];
for ( int i = 0; i < readLength; i++ )
quals[i] = (byte)(i % QualityUtils.MAX_QUAL_SCORE);
read.setBaseQualities(quals);
read.setCigarString(cigar);
tests.add(new Object[]{read, indel, eventSize, eventBases.equals("") ? null : eventBases});
}
}
return tests.toArray(new Object[][]{});
}
@Test(enabled = true && ! DEBUG, dataProvider = "IndelLengthAndBasesTest")
public void testIndelLengthAndBasesTest(GATKSAMRecord read, final CigarOperator op, final int eventSize, final String eventBases) {
// create the iterator by state with the fake reads and fake records
li = makeLTBS(Arrays.asList((GATKSAMRecord)read), createTestReadProperties());
Assert.assertTrue(li.hasNext());
final PileupElement firstMatch = getFirstPileupElement(li.next());
Assert.assertEquals(firstMatch.getLengthOfImmediatelyFollowingIndel(), 0, "Length != 0 for site not adjacent to indel");
Assert.assertEquals(firstMatch.getBasesOfImmediatelyFollowingInsertion(), null, "Getbases of following event should be null at non-adajenct event");
Assert.assertTrue(li.hasNext());
final PileupElement pe = getFirstPileupElement(li.next());
if ( op == CigarOperator.D )
Assert.assertTrue(pe.isBeforeDeletionStart());
else
Assert.assertTrue(pe.isBeforeInsertion());
Assert.assertEquals(pe.getLengthOfImmediatelyFollowingIndel(), eventSize, "Length of event failed");
Assert.assertEquals(pe.getBasesOfImmediatelyFollowingInsertion(), eventBases, "Getbases of following event failed");
}
private PileupElement getFirstPileupElement(final AlignmentContext context) {
final ReadBackedPileup p = context.getBasePileup();
Assert.assertEquals(p.getNumberOfElements(), 1);
return p.iterator().next();
}
////////////////////////////////////////////
// comprehensive LIBS/PileupElement tests //
////////////////////////////////////////////
@DataProvider(name = "LIBSTest")
public Object[][] makeLIBSTest() {
final List<Object[]> tests = new LinkedList<Object[]>();
// tests.add(new Object[]{new LIBSTest("2=2D2=2X", 1)});
// return tests.toArray(new Object[][]{});
return createLIBSTests(
Arrays.asList(1, 2),
Arrays.asList(1, 2, 3, 4));
// return createLIBSTests(
// Arrays.asList(2),
// Arrays.asList(3));
}
    /**
     * Comprehensive LIBS test: walks a single synthetic read locus-by-locus and
     * cross-checks every PileupElement state flag, read offset, and cigar position
     * against the independently-computed LIBS_position tracker.
     */
    @Test(enabled = true && ! DEBUG, dataProvider = "LIBSTest")
    public void testLIBS(LIBSTest params) {
        // create the iterator by state with the fake reads and fake records
        final GATKSAMRecord read = params.makeRead();
        li = makeLTBS(Arrays.asList((GATKSAMRecord)read), createTestReadProperties());
        final LIBS_position tester = new LIBS_position(read);

        int bpVisited = 0;
        int lastOffset = 0;
        while ( li.hasNext() ) {
            bpVisited++;

            AlignmentContext alignmentContext = li.next();
            ReadBackedPileup p = alignmentContext.getBasePileup();
            // one read in, so exactly one element per pileup
            Assert.assertEquals(p.getNumberOfElements(), 1);
            PileupElement pe = p.iterator().next();

            Assert.assertEquals(p.getNumberOfDeletions(), pe.isDeletion() ? 1 : 0, "wrong number of deletions in the pileup");
            Assert.assertEquals(p.getNumberOfMappingQualityZeroReads(), pe.getRead().getMappingQuality() == 0 ? 1 : 0, "wront number of mapq reads in the pileup");

            // advance the reference tracker one locus to mirror LIBS
            tester.stepForwardOnGenome();

            // NOTE(review): deletion-boundary flags are skipped near padded (P) cigar ops —
            // presumably because LIBS and LIBS_position legitimately disagree there; confirm
            // against hasNeighboringPaddedOps.
            if ( ! hasNeighboringPaddedOps(params.getElements(), pe.getCurrentCigarOffset()) ) {
                Assert.assertEquals(pe.isBeforeDeletionStart(), tester.isBeforeDeletionStart, "before deletion start failure");
                Assert.assertEquals(pe.isAfterDeletionEnd(), tester.isAfterDeletionEnd, "after deletion end failure");
            }

            Assert.assertEquals(pe.isBeforeInsertion(), tester.isBeforeInsertion, "before insertion failure");
            Assert.assertEquals(pe.isAfterInsertion(), tester.isAfterInsertion, "after insertion failure");
            Assert.assertEquals(pe.isNextToSoftClip(), tester.isNextToSoftClip, "next to soft clip failure");

            // read offsets must be monotonically non-decreasing across loci
            Assert.assertTrue(pe.getOffset() >= lastOffset, "Somehow read offsets are decreasing: lastOffset " + lastOffset + " current " + pe.getOffset());
            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offsets are wrong at " + bpVisited);

            // the element's cigar bookkeeping must agree with the read's own cigar
            Assert.assertEquals(pe.getCurrentCigarElement(), read.getCigar().getCigarElement(tester.currentOperatorIndex), "CigarElement index failure");
            Assert.assertEquals(pe.getOffsetInCurrentCigar(), tester.getCurrentPositionOnOperatorBase0(), "CigarElement index failure");

            Assert.assertEquals(read.getCigar().getCigarElement(pe.getCurrentCigarOffset()), pe.getCurrentCigarElement(), "Current cigar element isn't what we'd get from the read itself");

            Assert.assertTrue(pe.getOffsetInCurrentCigar() >= 0, "Offset into current cigar too small");
            Assert.assertTrue(pe.getOffsetInCurrentCigar() < pe.getCurrentCigarElement().getLength(), "Offset into current cigar too big");

            Assert.assertEquals(pe.getOffset(), tester.getCurrentReadOffset(), "Read offset failure");
            lastOffset = pe.getOffset();
        }

        // LIBS must emit one alignment context per reference base the read spans
        final int expectedBpToVisit = read.getAlignmentEnd() - read.getAlignmentStart() + 1;
        Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
    }
// ------------------------------------------------------------
//
// Tests for keeping reads
//
// ------------------------------------------------------------
@DataProvider(name = "LIBS_ComplexPileupTests")
public Object[][] makeLIBS_ComplexPileupTests() {
final List<Object[]> tests = new LinkedList<Object[]>();
for ( final int downsampleTo : Arrays.asList(-1, 1, 2, 5, 10, 30)) {
for ( final int nReadsPerLocus : Arrays.asList(1, 10, 60) ) {
for ( final int nLoci : Arrays.asList(1, 10, 25) ) {
for ( final int nSamples : Arrays.asList(1, 2, 10) ) {
for ( final boolean keepReads : Arrays.asList(true, false) ) {
for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true, false) ) {
// for ( final int downsampleTo : Arrays.asList(1)) {
// for ( final int nReadsPerLocus : Arrays.asList(1) ) {
// for ( final int nLoci : Arrays.asList(1) ) {
// for ( final int nSamples : Arrays.asList(1) ) {
// for ( final boolean keepReads : Arrays.asList(true) ) {
// for ( final boolean grabReadsAfterEachCycle : Arrays.asList(true) ) {
tests.add(new Object[]{nReadsPerLocus, nLoci, nSamples,
keepReads, grabReadsAfterEachCycle,
downsampleTo});
}
}
}
}
}
}
return tests.toArray(new Object[][]{});
}
@Test(enabled = true && ! DEBUG, dataProvider = "LIBS_ComplexPileupTests")
public void testLIBS_ComplexPileupTests(final int nReadsPerLocus,
final int nLoci,
final int nSamples,
final boolean keepReads,
final boolean grabReadsAfterEachCycle,
final int downsampleTo) {
//logger.warn(String.format("testLIBSKeepSubmittedReads %d %d %d %b %b %b", nReadsPerLocus, nLoci, nSamples, keepReads, grabReadsAfterEachCycle, downsample));
final int readLength = 10;
final boolean downsample = downsampleTo != -1;
final DownsamplingMethod downsampler = downsample
? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null, false)
: new DownsamplingMethod(DownsampleType.NONE, null, null, false);
final ArtificialBAMBuilder bamBuilder = new ArtificialBAMBuilder(header.getSequenceDictionary(), nReadsPerLocus, nLoci);
bamBuilder.createAndSetHeader(nSamples).setReadLength(readLength).setAlignmentStart(1);
final List<GATKSAMRecord> reads = bamBuilder.makeReads();
li = new LocusIteratorByState(new FakeCloseableIterator<GATKSAMRecord>(reads.iterator()),
createTestReadProperties(downsampler, keepReads),
genomeLocParser,
bamBuilder.getSamples());
final Set<GATKSAMRecord> seenSoFar = new HashSet<GATKSAMRecord>();
final Set<GATKSAMRecord> keptReads = new HashSet<GATKSAMRecord>();
int bpVisited = 0;
while ( li.hasNext() ) {
bpVisited++;
final AlignmentContext alignmentContext = li.next();
final ReadBackedPileup p = alignmentContext.getBasePileup();
AssertWellOrderedPileup(p);
if ( downsample ) {
// just not a safe test
//Assert.assertTrue(p.getNumberOfElements() <= maxDownsampledCoverage * nSamples, "Too many reads at locus after downsampling");
} else {
final int minPileupSize = nReadsPerLocus * nSamples;
Assert.assertTrue(p.getNumberOfElements() >= minPileupSize);
}
// the number of reads starting here
int nReadsStartingHere = 0;
for ( final GATKSAMRecord read : p.getReads() )
if ( read.getAlignmentStart() == alignmentContext.getPosition() )
nReadsStartingHere++;
// we can have no more than maxDownsampledCoverage per sample
final int maxCoveragePerLocus = downsample ? downsampleTo : nReadsPerLocus;
Assert.assertTrue(nReadsStartingHere <= maxCoveragePerLocus * nSamples);
seenSoFar.addAll(p.getReads());
if ( keepReads && grabReadsAfterEachCycle ) {
final List<GATKSAMRecord> locusReads = li.transferReadsFromAllPreviousPileups();
if ( downsample ) {
// with downsampling we might have some reads here that were downsampled away
// in the pileup. We want to ensure that no more than the max coverage per sample is added
Assert.assertTrue(locusReads.size() >= nReadsStartingHere);
Assert.assertTrue(locusReads.size() <= maxCoveragePerLocus * nSamples);
} else {
Assert.assertEquals(locusReads.size(), nReadsStartingHere);
}
keptReads.addAll(locusReads);
// check that all reads we've seen so far are in our keptReads
for ( final GATKSAMRecord read : seenSoFar ) {
Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read);
}
}
if ( ! keepReads )
Assert.assertTrue(li.getReadsFromAllPreviousPileups().isEmpty(), "Not keeping reads but the underlying list of reads isn't empty");
}
if ( keepReads && ! grabReadsAfterEachCycle )
keptReads.addAll(li.transferReadsFromAllPreviousPileups());
if ( ! downsample ) { // downsampling may drop loci
final int expectedBpToVisit = nLoci + readLength - 1;
Assert.assertEquals(bpVisited, expectedBpToVisit, "Didn't visit the expected number of bp");
}
if ( keepReads ) {
// check we have the right number of reads
final int totalReads = nLoci * nReadsPerLocus * nSamples;
if ( ! downsample ) { // downsampling may drop reads
Assert.assertEquals(keptReads.size(), totalReads, "LIBS didn't keep the right number of reads during the traversal");
// check that the order of reads is the same as in our read list
for ( int i = 0; i < reads.size(); i++ ) {
final GATKSAMRecord inputRead = reads.get(i);
final GATKSAMRecord keptRead = reads.get(i);
Assert.assertSame(keptRead, inputRead, "Input reads and kept reads differ at position " + i);
}
} else {
Assert.assertTrue(keptReads.size() <= totalReads, "LIBS didn't keep the right number of reads during the traversal");
}
// check uniqueness
final Set<String> readNames = new HashSet<String>();
for ( final GATKSAMRecord read : keptReads ) {
Assert.assertFalse(readNames.contains(read.getReadName()), "Found duplicate reads in the kept reads");
readNames.add(read.getReadName());
}
// check that all reads we've seen are in our keptReads
for ( final GATKSAMRecord read : seenSoFar ) {
Assert.assertTrue(keptReads.contains(read), "A read that appeared in a pileup wasn't found in the kept reads: " + read);
}
if ( ! downsample ) {
// check that every read in the list of keep reads occurred at least once in one of the pileups
for ( final GATKSAMRecord keptRead : keptReads ) {
Assert.assertTrue(seenSoFar.contains(keptRead), "There's a read " + keptRead + " in our keptReads list that never appeared in any pileup");
}
}
}
}
private void AssertWellOrderedPileup(final ReadBackedPileup pileup) {
if ( ! pileup.isEmpty() ) {
int leftMostPos = -1;
for ( final PileupElement pe : pileup ) {
Assert.assertTrue(pileup.getLocation().getContig().equals(pe.getRead().getReferenceName()), "ReadBackedPileup contains an element " + pe + " that's on a different contig than the pileup itself");
Assert.assertTrue(pe.getRead().getAlignmentStart() >= leftMostPos,
"ReadBackedPileup contains an element " + pe + " whose read's alignment start " + pe.getRead().getAlignmentStart()
+ " occurs before the leftmost position we've seen previously " + leftMostPos);
}
}
}
// ---------------------------------------------------------------------------
// make sure that downsampling isn't holding onto a bazillion reads
//
@DataProvider(name = "LIBS_NotHoldingTooManyReads")
public Object[][] makeLIBS_NotHoldingTooManyReads() {
final List<Object[]> tests = new LinkedList<Object[]>();
for ( final int downsampleTo : Arrays.asList(1, 10)) {
for ( final int nReadsPerLocus : Arrays.asList(100, 1000, 10000, 100000) ) {
for ( final int payloadInBytes : Arrays.asList(0, 1024, 1024*1024) ) {
tests.add(new Object[]{nReadsPerLocus, downsampleTo, payloadInBytes});
}
}
}
return tests.toArray(new Object[][]{});
}
@Test(enabled = true && ! DEBUG, dataProvider = "LIBS_NotHoldingTooManyReads")
// @Test(enabled = true, dataProvider = "LIBS_NotHoldingTooManyReads", timeOut = 100000)
public void testLIBS_NotHoldingTooManyReads(final int nReadsPerLocus, final int downsampleTo, final int payloadInBytes) {
logger.warn(String.format("testLIBS_NotHoldingTooManyReads %d %d %d", nReadsPerLocus, downsampleTo, payloadInBytes));
final int readLength = 10;
final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 100000);
final int nSamples = 1;
final List<String> samples = new ArrayList<String>(nSamples);
for ( int i = 0; i < nSamples; i++ ) {
final GATKSAMReadGroupRecord rg = new GATKSAMReadGroupRecord("rg" + i);
final String sample = "sample" + i;
samples.add(sample);
rg.setSample(sample);
rg.setPlatform(NGSPlatform.ILLUMINA.getDefaultPlatform());
header.addReadGroup(rg);
}
final boolean downsample = downsampleTo != -1;
final DownsamplingMethod downsampler = downsample
? new DownsamplingMethod(DownsampleType.BY_SAMPLE, downsampleTo, null, false)
: new DownsamplingMethod(DownsampleType.NONE, null, null, false);
// final List<GATKSAMRecord> reads = ArtificialSAMUtils.createReadStream(nReadsPerLocus, nLoci, header, 1, readLength);
final WeakReadTrackingIterator iterator = new WeakReadTrackingIterator(nReadsPerLocus, readLength, payloadInBytes, header);
li = new LocusIteratorByState(iterator,
createTestReadProperties(downsampler, false),
genomeLocParser,
samples);
while ( li.hasNext() ) {
final AlignmentContext next = li.next();
Assert.assertTrue(next.getBasePileup().getNumberOfElements() <= downsampleTo, "Too many elements in pileup " + next);
// TODO -- assert that there are <= X reads in memory after GC for some X
}
}
private static class WeakReadTrackingIterator implements Iterator<GATKSAMRecord> {
final int nReads, readLength, payloadInBytes;
int readI = 0;
final SAMFileHeader header;
private WeakReadTrackingIterator(int nReads, int readLength, final int payloadInBytes, final SAMFileHeader header) {
this.nReads = nReads;
this.readLength = readLength;
this.header = header;
this.payloadInBytes = payloadInBytes;
}
@Override public boolean hasNext() { return readI < nReads; }
@Override public void remove() { throw new UnsupportedOperationException("no remove"); }
@Override
public GATKSAMRecord next() {
readI++;
return makeRead();
}
private GATKSAMRecord makeRead() {
final SAMReadGroupRecord rg = header.getReadGroups().get(0);
final String readName = String.format("%s.%d.%s", "read", readI, rg.getId());
final GATKSAMRecord read = ArtificialSAMUtils.createArtificialRead(header, readName, 0, 1, readLength);
read.setReadGroup(new GATKSAMReadGroupRecord(rg));
if ( payloadInBytes > 0 )
// add a payload byte array to push memory use per read even higher
read.setAttribute("PL", new byte[payloadInBytes]);
return read;
}
}
}

Some files were not shown because too many files have changed in this diff Show More