Code cleanup before major refactor

This commit is contained in:
Mark DePristo 2012-06-02 20:21:27 -04:00
parent cebd37609c
commit 889e3c4583
2 changed files with 0 additions and 73 deletions

View File

@ -56,78 +56,6 @@ class BCF2LazyGenotypesDecoder implements LazyGenotypesContext.LazyParser {
this.nFields = nFields;
}
// @Override
// public LazyGenotypesContext.LazyData parse(final Object data) {
// logger.info("Decoding BCF genotypes for " + nSamples + " samples with " + nFields + " fields each");
//
// // load our byte[] data into the decoder
// final BCF2Decoder decoder = new BCF2Decoder((byte[])data);
//
// // go ahead and decode everyone
// final List<String> samples = new ArrayList<String>(codec.getHeader().getGenotypeSamples());
//
// if ( samples.size() != nSamples )
// throw new UserException.MalformedBCF2("GATK currently doesn't support reading BCF2 files with " +
// "different numbers of samples per record. Saw " + samples.size() +
// " samples in header but have a record with " + nSamples + " samples");
//
// final Map<String, List<Object>> fieldValues = decodeGenotypeFieldValues(decoder, nFields, nSamples);
// final ArrayList<Genotype> genotypes = new ArrayList<Genotype>(nSamples);
// for ( int i = 0; i < nSamples; i++ ) {
// // all of the information we need for each genotype, with default values
// final String sampleName = samples.get(i);
// List<Allele> alleles = null;
// boolean isPhased = false;
// double log10PError = VariantContext.NO_LOG10_PERROR;
// Set<String> filters = null;
// Map<String, Object> attributes = null;
// double[] log10Likelihoods = null;
//
// for ( final Map.Entry<String, List<Object>> entry : fieldValues.entrySet() ) {
// final String field = entry.getKey();
// Object value = entry.getValue().get(i);
// try {
// if ( field.equals(VCFConstants.GENOTYPE_KEY) ) {
// alleles = decodeGenotypeAlleles(siteAlleles, (List<Integer>)value);
// } else if ( field.equals(VCFConstants.GENOTYPE_QUALITY_KEY) ) {
// if ( value != BCF2Type.INT8.getMissingJavaValue() )
// log10PError = ((Integer)value) / -10.0;
// } else if ( field.equals(VCFConstants.PHRED_GENOTYPE_LIKELIHOODS_KEY) ) {
// final List<Integer> pls = (List<Integer>)value;
// if ( pls != null ) { // we have a PL field
// log10Likelihoods = new double[pls.size()];
// for ( int j = 0; j < log10Likelihoods.length; j++ ) {
// final double d = pls.get(j);
// log10Likelihoods[j] = d == -0.0 ? 0.0 : d / -10.0;
// }
// }
// } else if ( field.equals(VCFConstants.GENOTYPE_FILTER_KEY) ) {
// throw new ReviewedStingException("Genotype filters not implemented in GATK BCF2");
// //filters = new HashSet<String>(values.get(i));
// } else { // add to attributes
// if ( value != null ) { // don't add missing values
// if ( attributes == null ) attributes = new HashMap<String, Object>(nFields);
// if ( value instanceof List && ((List)value).size() == 1)
// value = ((List)value).get(0);
// attributes.put(field, value);
// }
// }
// } catch ( ClassCastException e ) {
// throw new UserException.MalformedBCF2("BUG: expected encoding of field " + field
// + " inconsistent with the value observed in the decoded value in the "
// + " BCF file. Value was " + value);
// }
// }
//
// if ( alleles == null ) throw new UserException.MalformedBCF2("BUG: no alleles found");
//
// final Genotype g = new Genotype(sampleName, alleles, log10PError, filters, attributes, isPhased, log10Likelihoods);
// genotypes.add(g);
// }
//
// return new LazyGenotypesContext.LazyData(genotypes, codec.getHeader().getSampleNamesInOrder(), codec.getHeader().getSampleNameToOffset());
// }
@Override
public LazyGenotypesContext.LazyData parse(final Object data) {
logger.info("Decoding BCF genotypes for " + nSamples + " samples with " + nFields + " fields each");

View File

@ -25,7 +25,6 @@
package org.broadinstitute.sting.utils.variantcontext;
import org.broad.tribble.util.ParsingUtils;
import org.broadinstitute.sting.utils.codecs.vcf.VCFConstants;
import org.broadinstitute.sting.utils.exceptions.ReviewedStingException;