Renaming the HapMap codec and feature to RawHapMap so that we don't get esoteric errors when trying to bind a rod with the name 'hapmap' (since it was also a feature).

This commit is contained in:
Eric Banks 2011-08-12 11:11:56 -04:00
parent f5b2cc4977
commit 27f0748b33
6 changed files with 40 additions and 40 deletions

View File

@@ -6,7 +6,7 @@ import org.broad.tribble.gelitext.GeliTextFeature;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.refdata.features.DbSNPHelper;
import org.broadinstitute.sting.utils.classloader.PluginManager;
import org.broadinstitute.sting.utils.codecs.hapmap.HapMapFeature;
import org.broadinstitute.sting.utils.codecs.hapmap.RawHapMapFeature;
import org.broadinstitute.sting.utils.codecs.vcf.VCFHeader;
import org.broadinstitute.sting.utils.codecs.vcf.VCFHeaderLine;
import org.broadinstitute.sting.utils.variantcontext.*;
@@ -226,7 +226,7 @@ public class VariantContextAdaptors {
* @return HapMapFeature.
*/
@Override
public Class<? extends Feature> getAdaptableFeatureType() { return HapMapFeature.class; }
public Class<? extends Feature> getAdaptableFeatureType() { return RawHapMapFeature.class; }
/**
* convert to a Variant Context, given:
@@ -240,7 +240,7 @@ public class VariantContextAdaptors {
if ( ref == null )
throw new UnsupportedOperationException("Conversion from HapMap to VariantContext requires a reference context");
HapMapFeature hapmap = (HapMapFeature)input;
RawHapMapFeature hapmap = (RawHapMapFeature)input;
int index = hapmap.getStart() - ref.getWindow().getStart();
if ( index < 0 )
@@ -255,7 +255,7 @@ public class VariantContextAdaptors {
// use the actual alleles, if available
if ( alleleMap != null ) {
alleles.addAll(alleleMap.values());
Allele deletionAllele = alleleMap.get(HapMapFeature.INSERTION); // yes, use insertion here (since we want the reference bases)
Allele deletionAllele = alleleMap.get(RawHapMapFeature.INSERTION); // yes, use insertion here (since we want the reference bases)
if ( deletionAllele != null && deletionAllele.isReference() )
deletionLength = deletionAllele.length();
} else {

View File

@@ -40,7 +40,7 @@ import org.broadinstitute.sting.gatk.walkers.*;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.codecs.hapmap.HapMapFeature;
import org.broadinstitute.sting.utils.codecs.hapmap.RawHapMapFeature;
import org.broadinstitute.sting.utils.codecs.vcf.*;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.variantcontext.Allele;
@@ -124,19 +124,19 @@ public class VariantsToVCF extends RodWalker<Integer, Integer> {
for ( Feature record : features ) {
if ( VariantContextAdaptors.canBeConvertedToVariantContext(record) ) {
// we need to special case the HapMap format because indels aren't handled correctly
if ( record instanceof HapMapFeature) {
if ( record instanceof RawHapMapFeature) {
// is it an indel?
HapMapFeature hapmap = (HapMapFeature)record;
if ( hapmap.getAlleles()[0].equals(HapMapFeature.NULL_ALLELE_STRING) || hapmap.getAlleles()[1].equals(HapMapFeature.NULL_ALLELE_STRING) ) {
RawHapMapFeature hapmap = (RawHapMapFeature)record;
if ( hapmap.getAlleles()[0].equals(RawHapMapFeature.NULL_ALLELE_STRING) || hapmap.getAlleles()[1].equals(RawHapMapFeature.NULL_ALLELE_STRING) ) {
// get the dbsnp object corresponding to this record (needed to help us distinguish between insertions and deletions)
VariantContext dbsnpVC = getDbsnp(hapmap.getName());
if ( dbsnpVC == null || dbsnpVC.isMixed() )
continue;
Map<String, Allele> alleleMap = new HashMap<String, Allele>(2);
alleleMap.put(HapMapFeature.DELETION, Allele.create(Allele.NULL_ALLELE_STRING, dbsnpVC.isInsertion()));
alleleMap.put(HapMapFeature.INSERTION, Allele.create(((HapMapFeature)record).getAlleles()[1], !dbsnpVC.isInsertion()));
alleleMap.put(RawHapMapFeature.DELETION, Allele.create(Allele.NULL_ALLELE_STRING, dbsnpVC.isInsertion()));
alleleMap.put(RawHapMapFeature.INSERTION, Allele.create(((RawHapMapFeature)record).getAlleles()[1], !dbsnpVC.isInsertion()));
hapmap.setActualAlleles(alleleMap);
// also, use the correct positioning for insertions
@@ -212,8 +212,8 @@ public class VariantsToVCF extends RodWalker<Integer, Integer> {
throw new IllegalStateException("No rod data is present, but we just created a VariantContext");
Feature f = features.get(0);
if ( f instanceof HapMapFeature )
samples.addAll(Arrays.asList(((HapMapFeature)f).getSampleIDs()));
if ( f instanceof RawHapMapFeature )
samples.addAll(Arrays.asList(((RawHapMapFeature)f).getSampleIDs()));
else
samples.addAll(vc.getSampleNames());
}

View File

@@ -40,7 +40,7 @@ import java.util.Arrays;
* in the file
*
*/
public class HapMapCodec implements FeatureCodec {
public class RawHapMapCodec implements FeatureCodec {
// the minimum number of features in the HapMap file line
private static final int minimumFeatureCount = 11;
@@ -67,7 +67,7 @@ public class HapMapCodec implements FeatureCodec {
throw new IllegalArgumentException("Unable to parse line " + line + ", the length of split features is less than the minimum of " + minimumFeatureCount);
// create a new feature given the array
return new HapMapFeature(array[0],
return new RawHapMapFeature(array[0],
array[1].split("/"),
array[2],
Long.valueOf(array[3]),
@@ -83,7 +83,7 @@ public class HapMapCodec implements FeatureCodec {
}
public Class getFeatureType() {
return HapMapFeature.class;
return RawHapMapFeature.class;
}
public Object readHeader(LineReader reader) {

View File

@@ -35,7 +35,7 @@ import java.util.Map;
* a feature returned by the HapMap Codec - it represents contig, position, name,
* alleles, other hapmap information, and genotypes for specified samples
*/
public class HapMapFeature implements Feature {
public class RawHapMapFeature implements Feature {
public static final String NULL_ALLELE_STRING = "-";
public static final String INSERTION = "I";
@@ -72,19 +72,19 @@ public class HapMapFeature implements Feature {
* @param qccode ??
* @param genotypes a list of strings, representing the genotypes for the list of samples
*/
public HapMapFeature(String name,
String[] alleles,
String contig,
Long position,
Strand strand,
String assembly,
String center,
String protLSID,
String assayLSID,
String panelLSID,
String qccode,
String[] genotypes,
String headerLine) {
public RawHapMapFeature(String name,
String[] alleles,
String contig,
Long position,
Strand strand,
String assembly,
String center,
String protLSID,
String assayLSID,
String panelLSID,
String qccode,
String[] genotypes,
String headerLine) {
this.name = name;
this.alleles = alleles;
this.contig = contig;

View File

@@ -76,7 +76,7 @@ public class VariantsToVCFIntegrationTest extends WalkerTest {
WalkerTest.WalkerTestSpec spec = new WalkerTest.WalkerTestSpec(
"-R " + b36KGReference +
" --variant:HapMap " + validationDataLocation + "rawHapMap.yri.chr1.txt" +
" --variant:RawHapMap " + validationDataLocation + "rawHapMap.yri.chr1.txt" +
" -T VariantsToVCF" +
" -L 1:1-1,000,000" +
" -o %s" +

View File

@@ -49,12 +49,12 @@ public class HapMapUnitTest {
*/
@Test
public void testReadHeader() {
HapMapCodec codec = new HapMapCodec();
RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader();
try {
String header = reader.readLine();
reader.close();
Assert.assertTrue(header.equals((String)codec.readHeader(getReader())));
Assert.assertTrue(header.equals(codec.readHeader(getReader())));
} catch (IOException e) {
Assert.fail("Unable to read from file " + hapMapFile);
}
@@ -63,8 +63,8 @@ public class HapMapUnitTest {
@Test
public void testKnownRecordConversion() {
HapMapCodec codec = new HapMapCodec();
HapMapFeature feature = (HapMapFeature)codec.decode(knownLine);
RawHapMapCodec codec = new RawHapMapCodec();
RawHapMapFeature feature = (RawHapMapFeature)codec.decode(knownLine);
// check that the alleles are right
@@ -110,16 +110,16 @@ public class HapMapUnitTest {
@Test
public void testReadCorrectNumberOfRecords() {
// setup the record for reading our 500 line file (499 records, 1 header line)
HapMapCodec codec = new HapMapCodec();
RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader();
String line = null;
String line;
int count = 0;
try {
codec.readHeader(reader);
line = reader.readLine();
while (line != null) {
HapMapFeature feature = (HapMapFeature) codec.decode(line);
codec.decode(line);
line = reader.readLine();
++count;
}
@@ -133,14 +133,14 @@ public class HapMapUnitTest {
@Test
public void testGetSampleNames() {
// setup the record for reading our 500 line file (499 records, 1 header line)
HapMapCodec codec = new HapMapCodec();
RawHapMapCodec codec = new RawHapMapCodec();
AsciiLineReader reader = getReader();
String line = null;
String line;
try {
codec.readHeader(reader);
line = reader.readLine();
HapMapFeature feature = (HapMapFeature) codec.decode(line);
RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line);
Assert.assertEquals(feature.getSampleIDs().length,87);
} catch (IOException e) {