Renaming the HapMap codec and feature to RawHapMap so that we don't get esoteric errors when trying to bind a rod with the name 'hapmap' (since 'HapMap' was also the name of the feature type, causing a naming collision).

This commit is contained in:
Eric Banks 2011-08-12 11:11:56 -04:00
parent f5b2cc4977
commit 27f0748b33
6 changed files with 40 additions and 40 deletions

View File

@ -6,7 +6,7 @@ import org.broad.tribble.gelitext.GeliTextFeature;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext; import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.features.DbSNPHelper; import org.broadinstitute.sting.gatk.refdata.features.DbSNPHelper;
import org.broadinstitute.sting.utils.classloader.PluginManager; import org.broadinstitute.sting.utils.classloader.PluginManager;
import org.broadinstitute.sting.utils.codecs.hapmap.HapMapFeature; import org.broadinstitute.sting.utils.codecs.hapmap.RawHapMapFeature;
import org.broadinstitute.sting.utils.codecs.vcf.VCFHeader; import org.broadinstitute.sting.utils.codecs.vcf.VCFHeader;
import org.broadinstitute.sting.utils.codecs.vcf.VCFHeaderLine; import org.broadinstitute.sting.utils.codecs.vcf.VCFHeaderLine;
import org.broadinstitute.sting.utils.variantcontext.*; import org.broadinstitute.sting.utils.variantcontext.*;
@ -226,7 +226,7 @@ public class VariantContextAdaptors {
* @return HapMapFeature. * @return HapMapFeature.
*/ */
@Override @Override
public Class<? extends Feature> getAdaptableFeatureType() { return HapMapFeature.class; } public Class<? extends Feature> getAdaptableFeatureType() { return RawHapMapFeature.class; }
/** /**
* convert to a Variant Context, given: * convert to a Variant Context, given:
@ -240,7 +240,7 @@ public class VariantContextAdaptors {
if ( ref == null ) if ( ref == null )
throw new UnsupportedOperationException("Conversion from HapMap to VariantContext requires a reference context"); throw new UnsupportedOperationException("Conversion from HapMap to VariantContext requires a reference context");
HapMapFeature hapmap = (HapMapFeature)input; RawHapMapFeature hapmap = (RawHapMapFeature)input;
int index = hapmap.getStart() - ref.getWindow().getStart(); int index = hapmap.getStart() - ref.getWindow().getStart();
if ( index < 0 ) if ( index < 0 )
@ -255,7 +255,7 @@ public class VariantContextAdaptors {
// use the actual alleles, if available // use the actual alleles, if available
if ( alleleMap != null ) { if ( alleleMap != null ) {
alleles.addAll(alleleMap.values()); alleles.addAll(alleleMap.values());
Allele deletionAllele = alleleMap.get(HapMapFeature.INSERTION); // yes, use insertion here (since we want the reference bases) Allele deletionAllele = alleleMap.get(RawHapMapFeature.INSERTION); // yes, use insertion here (since we want the reference bases)
if ( deletionAllele != null && deletionAllele.isReference() ) if ( deletionAllele != null && deletionAllele.isReference() )
deletionLength = deletionAllele.length(); deletionLength = deletionAllele.length();
} else { } else {

View File

@ -40,7 +40,7 @@ import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.utils.BaseUtils; import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.SampleUtils; import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.codecs.hapmap.HapMapFeature; import org.broadinstitute.sting.utils.codecs.hapmap.RawHapMapFeature;
import org.broadinstitute.sting.utils.codecs.vcf.*; import org.broadinstitute.sting.utils.codecs.vcf.*;
import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.variantcontext.Allele; import org.broadinstitute.sting.utils.variantcontext.Allele;
@ -124,19 +124,19 @@ public class VariantsToVCF extends RodWalker<Integer, Integer> {
for ( Feature record : features ) { for ( Feature record : features ) {
if ( VariantContextAdaptors.canBeConvertedToVariantContext(record) ) { if ( VariantContextAdaptors.canBeConvertedToVariantContext(record) ) {
// we need to special case the HapMap format because indels aren't handled correctly // we need to special case the HapMap format because indels aren't handled correctly
if ( record instanceof HapMapFeature) { if ( record instanceof RawHapMapFeature) {
// is it an indel? // is it an indel?
HapMapFeature hapmap = (HapMapFeature)record; RawHapMapFeature hapmap = (RawHapMapFeature)record;
if ( hapmap.getAlleles()[0].equals(HapMapFeature.NULL_ALLELE_STRING) || hapmap.getAlleles()[1].equals(HapMapFeature.NULL_ALLELE_STRING) ) { if ( hapmap.getAlleles()[0].equals(RawHapMapFeature.NULL_ALLELE_STRING) || hapmap.getAlleles()[1].equals(RawHapMapFeature.NULL_ALLELE_STRING) ) {
// get the dbsnp object corresponding to this record (needed to help us distinguish between insertions and deletions) // get the dbsnp object corresponding to this record (needed to help us distinguish between insertions and deletions)
VariantContext dbsnpVC = getDbsnp(hapmap.getName()); VariantContext dbsnpVC = getDbsnp(hapmap.getName());
if ( dbsnpVC == null || dbsnpVC.isMixed() ) if ( dbsnpVC == null || dbsnpVC.isMixed() )
continue; continue;
Map<String, Allele> alleleMap = new HashMap<String, Allele>(2); Map<String, Allele> alleleMap = new HashMap<String, Allele>(2);
alleleMap.put(HapMapFeature.DELETION, Allele.create(Allele.NULL_ALLELE_STRING, dbsnpVC.isInsertion())); alleleMap.put(RawHapMapFeature.DELETION, Allele.create(Allele.NULL_ALLELE_STRING, dbsnpVC.isInsertion()));
alleleMap.put(HapMapFeature.INSERTION, Allele.create(((HapMapFeature)record).getAlleles()[1], !dbsnpVC.isInsertion())); alleleMap.put(RawHapMapFeature.INSERTION, Allele.create(((RawHapMapFeature)record).getAlleles()[1], !dbsnpVC.isInsertion()));
hapmap.setActualAlleles(alleleMap); hapmap.setActualAlleles(alleleMap);
// also, use the correct positioning for insertions // also, use the correct positioning for insertions
@ -212,8 +212,8 @@ public class VariantsToVCF extends RodWalker<Integer, Integer> {
throw new IllegalStateException("No rod data is present, but we just created a VariantContext"); throw new IllegalStateException("No rod data is present, but we just created a VariantContext");
Feature f = features.get(0); Feature f = features.get(0);
if ( f instanceof HapMapFeature ) if ( f instanceof RawHapMapFeature )
samples.addAll(Arrays.asList(((HapMapFeature)f).getSampleIDs())); samples.addAll(Arrays.asList(((RawHapMapFeature)f).getSampleIDs()));
else else
samples.addAll(vc.getSampleNames()); samples.addAll(vc.getSampleNames());
} }

View File

@ -40,7 +40,7 @@ import java.util.Arrays;
* in the file * in the file
* *
*/ */
public class HapMapCodec implements FeatureCodec { public class RawHapMapCodec implements FeatureCodec {
// the minimum number of features in the HapMap file line // the minimum number of features in the HapMap file line
private static final int minimumFeatureCount = 11; private static final int minimumFeatureCount = 11;
@ -67,7 +67,7 @@ public class HapMapCodec implements FeatureCodec {
throw new IllegalArgumentException("Unable to parse line " + line + ", the length of split features is less than the minimum of " + minimumFeatureCount); throw new IllegalArgumentException("Unable to parse line " + line + ", the length of split features is less than the minimum of " + minimumFeatureCount);
// create a new feature given the array // create a new feature given the array
return new HapMapFeature(array[0], return new RawHapMapFeature(array[0],
array[1].split("/"), array[1].split("/"),
array[2], array[2],
Long.valueOf(array[3]), Long.valueOf(array[3]),
@ -83,7 +83,7 @@ public class HapMapCodec implements FeatureCodec {
} }
public Class getFeatureType() { public Class getFeatureType() {
return HapMapFeature.class; return RawHapMapFeature.class;
} }
public Object readHeader(LineReader reader) { public Object readHeader(LineReader reader) {

View File

@ -35,7 +35,7 @@ import java.util.Map;
* a feature returned by the HapMap Codec - it represents contig, position, name, * a feature returned by the HapMap Codec - it represents contig, position, name,
* alleles, other hapmap information, and genotypes for specified samples * alleles, other hapmap information, and genotypes for specified samples
*/ */
public class HapMapFeature implements Feature { public class RawHapMapFeature implements Feature {
public static final String NULL_ALLELE_STRING = "-"; public static final String NULL_ALLELE_STRING = "-";
public static final String INSERTION = "I"; public static final String INSERTION = "I";
@ -72,7 +72,7 @@ public class HapMapFeature implements Feature {
* @param qccode ?? * @param qccode ??
* @param genotypes a list of strings, representing the genotypes for the list of samples * @param genotypes a list of strings, representing the genotypes for the list of samples
*/ */
public HapMapFeature(String name, public RawHapMapFeature(String name,
String[] alleles, String[] alleles,
String contig, String contig,
Long position, Long position,

View File

@ -76,7 +76,7 @@ public class VariantsToVCFIntegrationTest extends WalkerTest {
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec( WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-R " + b36KGReference + "-R " + b36KGReference +
" --variant:HapMap " + validationDataLocation + "rawHapMap.yri.chr1.txt" + " --variant:RawHapMap " + validationDataLocation + "rawHapMap.yri.chr1.txt" +
" -T VariantsToVCF" + " -T VariantsToVCF" +
" -L 1:1-1,000,000" + " -L 1:1-1,000,000" +
" -o %s" + " -o %s" +

View File

@ -49,12 +49,12 @@ public class HapMapUnitTest {
*/ */
@Test @Test
public void testReadHeader() { public void testReadHeader() {
HapMapCodec codec = new HapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader(); AsciiLineReader reader = getReader();
try { try {
String header = reader.readLine(); String header = reader.readLine();
reader.close(); reader.close();
Assert.assertTrue(header.equals((String)codec.readHeader(getReader()))); Assert.assertTrue(header.equals(codec.readHeader(getReader())));
} catch (IOException e) { } catch (IOException e) {
Assert.fail("Unable to read from file " + hapMapFile); Assert.fail("Unable to read from file " + hapMapFile);
} }
@ -63,8 +63,8 @@ public class HapMapUnitTest {
@Test @Test
public void testKnownRecordConversion() { public void testKnownRecordConversion() {
HapMapCodec codec = new HapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
HapMapFeature feature = (HapMapFeature)codec.decode(knownLine); RawHapMapFeature feature = (RawHapMapFeature)codec.decode(knownLine);
// check that the alleles are right // check that the alleles are right
@ -110,16 +110,16 @@ public class HapMapUnitTest {
@Test @Test
public void testReadCorrectNumberOfRecords() { public void testReadCorrectNumberOfRecords() {
// setup the record for reading our 500 line file (499 records, 1 header line) // setup the record for reading our 500 line file (499 records, 1 header line)
HapMapCodec codec = new HapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader(); AsciiLineReader reader = getReader();
String line = null; String line;
int count = 0; int count = 0;
try { try {
codec.readHeader(reader); codec.readHeader(reader);
line = reader.readLine(); line = reader.readLine();
while (line != null) { while (line != null) {
HapMapFeature feature = (HapMapFeature) codec.decode(line); codec.decode(line);
line = reader.readLine(); line = reader.readLine();
++count; ++count;
} }
@ -133,14 +133,14 @@ public class HapMapUnitTest {
@Test @Test
public void testGetSampleNames() { public void testGetSampleNames() {
// setup the record for reading our 500 line file (499 records, 1 header line) // setup the record for reading our 500 line file (499 records, 1 header line)
HapMapCodec codec = new HapMapCodec(); RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader(); AsciiLineReader reader = getReader();
String line = null; String line;
try { try {
codec.readHeader(reader); codec.readHeader(reader);
line = reader.readLine(); line = reader.readLine();
HapMapFeature feature = (HapMapFeature) codec.decode(line); RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line);
Assert.assertEquals(feature.getSampleIDs().length,87); Assert.assertEquals(feature.getSampleIDs().length,87);
} catch (IOException e) { } catch (IOException e) {