This page collects typical usage examples of the GenotypeBuilder.AD attribute from the Java class htsjdk.variant.variantcontext.GenotypeBuilder. If you are unsure what GenotypeBuilder.AD is for or how to use it, the selected examples below should help; for more context, see the enclosing class htsjdk.variant.variantcontext.GenotypeBuilder.
Ten code examples of the GenotypeBuilder.AD attribute are shown below, ordered by popularity by default.
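Before the examples, here is a minimal sketch (not taken from the examples below; the sample name and alleles are made up) of how AD is typically set through GenotypeBuilder. AD holds one depth per allele, reference first, then the alternates in VariantContext order:

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;

import java.util.Arrays;
import java.util.List;

public class AdExample {
    public static void main(String[] args) {
        // Hypothetical ref/alt alleles for a biallelic SNP site.
        final Allele ref = Allele.create("A", true);
        final Allele alt = Allele.create("T", false);
        final List<Allele> calledAlleles = Arrays.asList(ref, alt);

        final Genotype g = new GenotypeBuilder("sample1", calledAlleles)
                .AD(new int[]{12, 7})   // 12 reads support REF, 7 support ALT
                .DP(19)
                .make();

        System.out.println(g.hasAD() + " " + Arrays.toString(g.getAD())); // true [12, 7]
    }
}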
Example 1: fixAD
/**
 * Fix the AD for the given Genotype
 *
 * @param genotype the original Genotype
 * @param alleleIndexesToUse a bitset describing whether or not to keep a given index
 * @param nAllelesToUse how many alleles we are keeping
 * @return a non-null Genotype
 */
private static Genotype fixAD(final Genotype genotype, final boolean[] alleleIndexesToUse, final int nAllelesToUse) {
    // if it ain't broke don't fix it
    if ( !genotype.hasAD() )
        return genotype;

    final GenotypeBuilder builder = new GenotypeBuilder(genotype);

    final int[] oldAD = genotype.getAD();
    if ( oldAD.length != alleleIndexesToUse.length ) {
        builder.noAD();
    } else {
        final int[] newAD = new int[nAllelesToUse];
        int currentIndex = 0;
        for ( int i = 0; i < oldAD.length; i++ ) {
            if ( alleleIndexesToUse[i] )
                newAD[currentIndex++] = oldAD[i];
        }
        builder.AD(newAD);
    }
    return builder.make();
}
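fixAD is private to its enclosing class, so it cannot be called directly from outside; the standalone sketch below (all names and values are mine, not from the source) reproduces the same mask-based subsetting against the htsjdk API:

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;

import java.util.Arrays;

public class SubsetAdSketch {
    public static void main(String[] args) {
        // A site with three alleles (ref + 2 alts), so the AD array has three entries.
        final Allele ref  = Allele.create("A", true);
        final Allele alt1 = Allele.create("C", false);
        final Genotype original = new GenotypeBuilder("sample1", Arrays.asList(ref, alt1))
                .AD(new int[]{10, 4, 0})
                .make();

        // Keep the ref and the first alt, drop the second alt (mask parallels the allele list).
        final boolean[] keep = {true, true, false};
        final int[] oldAD = original.getAD();
        final int[] newAD = new int[2];
        int j = 0;
        for (int i = 0; i < oldAD.length; i++) {
            if (keep[i]) newAD[j++] = oldAD[i];
        }

        final Genotype subset = new GenotypeBuilder(original).AD(newAD).make();
        System.out.println(Arrays.toString(subset.getAD())); // [10, 4]
    }
}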
Example 2: annotateWithPileup
private void annotateWithPileup(final AlignmentContext stratifiedContext, final VariantContext vc, final GenotypeBuilder gb) {
    final HashMap<Byte, Integer> alleleCounts = new HashMap<>();
    for ( final Allele allele : vc.getAlleles() )
        alleleCounts.put(allele.getBases()[0], 0);

    final ReadBackedPileup pileup = stratifiedContext.getBasePileup();
    for ( final PileupElement p : pileup ) {
        if ( alleleCounts.containsKey(p.getBase()) )
            alleleCounts.put(p.getBase(), alleleCounts.get(p.getBase())+1);
    }

    // we need to add counts in the correct order
    final int[] counts = new int[alleleCounts.size()];
    counts[0] = alleleCounts.get(vc.getReference().getBases()[0]);
    for (int i = 0; i < vc.getAlternateAlleles().size(); i++)
        counts[i+1] = alleleCounts.get(vc.getAlternateAllele(i).getBases()[0]);

    gb.AD(counts);
}
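AlignmentContext and ReadBackedPileup belong to the GATK engine and are awkward to construct in isolation, so the sketch below substitutes a plain byte array for the pileup (toy data, hypothetical class name) and only illustrates the counting and the ref-first AD ordering used above:

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class PileupAdSketch {
    public static void main(String[] args) {
        // A biallelic A>G site.
        final Allele ref = Allele.create("A", true);
        final Allele alt = Allele.create("G", false);
        final VariantContext vc = new VariantContextBuilder("toy", "1", 100, 100, Arrays.asList(ref, alt)).make();

        // Stand-in for the pileup: one observed base per overlapping read.
        final byte[] observedBases = {'A', 'A', 'G', 'T', 'A', 'G'};

        final Map<Byte, Integer> alleleCounts = new HashMap<>();
        for (final Allele allele : vc.getAlleles())
            alleleCounts.put(allele.getBases()[0], 0);
        for (final byte b : observedBases)
            if (alleleCounts.containsKey(b))
                alleleCounts.put(b, alleleCounts.get(b) + 1);   // 'T' is ignored: not an allele of this site

        // AD order is fixed: reference depth first, then alternates in VariantContext order.
        final int[] counts = new int[alleleCounts.size()];
        counts[0] = alleleCounts.get(vc.getReference().getBases()[0]);
        for (int i = 0; i < vc.getAlternateAlleles().size(); i++)
            counts[i + 1] = alleleCounts.get(vc.getAlternateAllele(i).getBases()[0]);

        System.out.println(Arrays.toString(counts)); // [3, 2]
    }
}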
Example 3: annotateWithLikelihoods
private void annotateWithLikelihoods(final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap, final VariantContext vc, final GenotypeBuilder gb) {
    final Set<Allele> alleles = new HashSet<>(vc.getAlleles());

    // make sure that there's a meaningful relationship between the alleles in the perReadAlleleLikelihoodMap and our VariantContext
    if ( ! perReadAlleleLikelihoodMap.getAllelesSet().containsAll(alleles) )
        throw new IllegalStateException("VC alleles " + alleles + " not a strict subset of per read allele map alleles " + perReadAlleleLikelihoodMap.getAllelesSet());

    final HashMap<Allele, Integer> alleleCounts = new HashMap<>();
    for ( final Allele allele : vc.getAlleles() ) { alleleCounts.put(allele, 0); }

    for ( final Map.Entry<GATKSAMRecord,Map<Allele,Double>> el : perReadAlleleLikelihoodMap.getLikelihoodReadMap().entrySet()) {
        final MostLikelyAllele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue(), alleles);
        if (! a.isInformative() ) continue; // read is non-informative
        final GATKSAMRecord read = el.getKey();
        final int prevCount = alleleCounts.get(a.getMostLikelyAllele());
        alleleCounts.put(a.getMostLikelyAllele(), prevCount + 1);
    }

    final int[] counts = new int[alleleCounts.size()];
    counts[0] = alleleCounts.get(vc.getReference());
    for (int i = 0; i < vc.getAlternateAlleles().size(); i++)
        counts[i+1] = alleleCounts.get( vc.getAlternateAllele(i) );

    gb.AD(counts);
}
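PerReadAlleleLikelihoodMap and MostLikelyAllele are GATK-internal types; the sketch below imitates the per-read "most likely allele" counting with plain maps. The mostLikely helper and its minDelta tie-break threshold are my own simplification, not the GATK implementation:

import htsjdk.variant.variantcontext.Allele;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MostLikelyAlleleSketch {
    // Pick the allele with the highest (log10) likelihood for one read,
    // or return null if the best and second-best are effectively tied.
    static Allele mostLikely(final Map<Allele, Double> likelihoods, final double minDelta) {
        Allele best = null, second = null;
        for (final Map.Entry<Allele, Double> e : likelihoods.entrySet()) {
            if (best == null || e.getValue() > likelihoods.get(best)) {
                second = best;
                best = e.getKey();
            } else if (second == null || e.getValue() > likelihoods.get(second)) {
                second = e.getKey();
            }
        }
        if (second != null && likelihoods.get(best) - likelihoods.get(second) < minDelta)
            return null; // uninformative read
        return best;
    }

    public static void main(String[] args) {
        final Allele ref = Allele.create("A", true);
        final Allele alt = Allele.create("T", false);

        // One likelihood map per read (log10 scale), as a PerReadAlleleLikelihoodMap would hold.
        final List<Map<Allele, Double>> perRead = Arrays.asList(
                Map.of(ref, -0.1, alt, -3.0),   // clearly supports REF
                Map.of(ref, -2.8, alt, -0.2),   // clearly supports ALT
                Map.of(ref, -1.0, alt, -1.1));  // ambiguous -> not counted

        final Map<Allele, Integer> adCounts = new HashMap<>();
        adCounts.put(ref, 0);
        adCounts.put(alt, 0);
        for (final Map<Allele, Double> m : perRead) {
            final Allele a = mostLikely(m, 0.5);
            if (a != null) adCounts.put(a, adCounts.get(a) + 1);
        }
        System.out.println(adCounts); // e.g. {A*=1, T=1} (map order may vary)
    }
}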
Example 4: annotateWithPileup
private void annotateWithPileup(final Pileup pileup, final VariantContext vc, final GenotypeBuilder gb) {
    //System.out.println("annotated with pileup");
    HashMap<Byte, Integer> alleleCounts = new HashMap<Byte, Integer>();
    for ( Allele allele : vc.getAlleles() ) {
        alleleCounts.put(allele.getBases()[0], 0);
        //System.out.println("genotype allele:" + allele.getBases()[0]);
    }

    for ( PileupReadInfo p : pileup.getTotalPileup() ) {
        if ( alleleCounts.containsKey(p.getByteBase()) ) {
            //System.out.println("pileup allele:" + p.getBase());
            alleleCounts.put(p.getByteBase(), alleleCounts.get(p.getByteBase())+1);
        }
    }

    // we need to add counts in the correct order
    int[] counts = new int[alleleCounts.size()];
    counts[0] = alleleCounts.get(vc.getReference().getBases()[0]);
    for (int i = 0; i < vc.getAlternateAlleles().size(); i++)
        counts[i+1] = alleleCounts.get(vc.getAlternateAllele(i).getBases()[0]);

    gb.AD(counts);
}
Example 5: annotateWithLikelihoods
private void annotateWithLikelihoods(final PerReadAlleleLikelihoodMap perReadAlleleLikelihoodMap, final VariantContext vc, final GenotypeBuilder gb) {
    final HashMap<Allele, Integer> alleleCounts = new HashMap<Allele, Integer>();
    //System.out.println("vc allele:");
    for ( final Allele allele : vc.getAlleles() ) {
        alleleCounts.put(allele, 0);
        //System.out.println(allele);
    }

    Map<Allele, Allele> alleles = getExtendedAllele(perReadAlleleLikelihoodMap.getRefAllele(), vc);
    /* System.out.println("recal vc allele:");
    for ( final Allele allele : alleles.keySet() ) {
        System.out.println(allele);
    }*/

    for (Map.Entry<AlignmentsBasic,Map<Allele,Double>> el : perReadAlleleLikelihoodMap.getLikelihoodReadMap().entrySet()) {
        final AlignmentsBasic read = el.getKey();
        final Allele a = PerReadAlleleLikelihoodMap.getMostLikelyAllele(el.getValue());
        //System.out.println(read.getReadName() + "\t" + a.getBaseString());
        if (a.isNoCall())
            continue; // read is non-informative
        if (!alleles.keySet().contains(a))
            continue; // sanity check - shouldn't be needed

        Allele ab = alleles.get(a);
        alleleCounts.put(ab, alleleCounts.get(ab) + 1);
    }

    final int[] counts = new int[alleleCounts.size()];
    counts[0] = alleleCounts.get(vc.getReference());
    for (int i = 0; i < vc.getAlternateAlleles().size(); i++)
        counts[i+1] = alleleCounts.get( vc.getAlternateAllele(i) );

    gb.AD(counts);
}
Example 6: annotate
@Override
public void annotate(final ReferenceContext ref,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final ReadLikelihoods<Allele> likelihoods) {
    Utils.nonNull(gb, "gb is null");
    Utils.nonNull(vc, "vc is null");

    if ( g == null || !g.isCalled() || likelihoods == null) {
        return;
    }
    final Set<Allele> alleles = new LinkedHashSet<>(vc.getAlleles());

    // make sure that there's a meaningful relationship between the alleles in the likelihoods and our VariantContext
    Utils.validateArg(likelihoods.alleles().containsAll(alleles), () -> "VC alleles " + alleles + " not a subset of ReadLikelihoods alleles " + likelihoods.alleles());

    final Map<Allele, Integer> alleleCounts = new LinkedHashMap<>();
    for ( final Allele allele : vc.getAlleles() ) {
        alleleCounts.put(allele, 0);
    }
    final Map<Allele, List<Allele>> alleleSubset = alleles.stream().collect(Collectors.toMap(a -> a, Arrays::asList));
    final ReadLikelihoods<Allele> subsettedLikelihoods = likelihoods.marginalize(alleleSubset);
    subsettedLikelihoods.bestAlleles(g.getSampleName()).stream()
            .filter(ba -> ba.isInformative())
            .forEach(ba -> alleleCounts.compute(ba.allele, (allele,prevCount) -> prevCount + 1));

    final int[] counts = new int[alleleCounts.size()];
    counts[0] = alleleCounts.get(vc.getReference()); //first one in AD is always ref
    for (int i = 0; i < vc.getAlternateAlleles().size(); i++) {
        counts[i + 1] = alleleCounts.get(vc.getAlternateAllele(i));
    }

    gb.AD(counts);
}
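ReadLikelihoods and its BestAllele objects also come from the GATK engine; the sketch below (toy data, with a hypothetical BestCall record standing in for BestAllele) only demonstrates the stream/compute counting pattern used above:

import htsjdk.variant.variantcontext.Allele;

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class BestAlleleCountSketch {
    // Hypothetical stand-in for a per-read best-allele call: the winning allele
    // and whether the call was confident enough to count.
    record BestCall(Allele allele, boolean informative) {}

    public static void main(String[] args) {
        final Allele ref = Allele.create("C", true);
        final Allele alt = Allele.create("T", false);

        final List<BestCall> bestPerRead = Arrays.asList(
                new BestCall(ref, true),
                new BestCall(alt, true),
                new BestCall(ref, false),  // filtered out below
                new BestCall(ref, true));

        // Seed with zeros so alleles with no supporting reads still get an AD entry.
        final Map<Allele, Integer> alleleCounts = new LinkedHashMap<>();
        alleleCounts.put(ref, 0);
        alleleCounts.put(alt, 0);

        bestPerRead.stream()
                .filter(BestCall::informative)
                .forEach(bc -> alleleCounts.compute(bc.allele(), (allele, prev) -> prev + 1));

        System.out.println(alleleCounts); // {C*=2, T=1}
    }
}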
Example 7: entryToObject
@Override
public Genotype entryToObject(TupleInput in)
    {
    GenotypeBuilder gb=new GenotypeBuilder(in.readString());
    if(in.readBoolean()) gb.DP(in.readInt());
    if(in.readBoolean()) gb.AD(arrayOfIntToEntry(in));
    if(in.readBoolean()) gb.GQ(in.readInt());
    if(in.readBoolean()) gb.PL(arrayOfIntToEntry(in));

    /* ALLELES ======================================== */
    int n=in.readInt();
    List<Allele> alleles=new ArrayList<Allele>(n);
    for(int i=0;i< n;++i)
        {
        alleles.add(this.alleleBinding.entryToObject(in));
        }
    gb.alleles(alleles);

    /* ATTRIBUTES ===================================== */
    n=in.readInt();
    for(int i=0;i< n;++i)
        {
        String key=in.readString();
        gb.attribute(key, super.readAttribute(in));
        }
    return gb.make();
    }
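The matching write side is not shown on this page. The sketch below is a guess at what objectToEntry might look like with Berkeley DB JE's TupleOutput; entryFromArrayOfInt, alleleBinding and writeAttribute are hypothetical mirrors of the class's unshown helpers, and the write order must match the read order above field for field:

// Sketch only: mirrors the read order of entryToObject above. The helpers
// (entryFromArrayOfInt, alleleBinding, writeAttribute) are hypothetical stand-ins
// for the class's own unshown helpers.
public void objectToEntry(Genotype g, TupleOutput out)
    {
    out.writeString(g.getSampleName());

    out.writeBoolean(g.hasDP());
    if(g.hasDP()) out.writeInt(g.getDP());

    out.writeBoolean(g.hasAD());
    if(g.hasAD()) entryFromArrayOfInt(out, g.getAD());   // counterpart of arrayOfIntToEntry

    out.writeBoolean(g.hasGQ());
    if(g.hasGQ()) out.writeInt(g.getGQ());

    out.writeBoolean(g.hasPL());
    if(g.hasPL()) entryFromArrayOfInt(out, g.getPL());

    /* ALLELES ======================================== */
    out.writeInt(g.getAlleles().size());
    for(Allele a : g.getAlleles())
        {
        this.alleleBinding.objectToEntry(a, out);
        }

    /* ATTRIBUTES ===================================== */
    Map<String, Object> attrs = g.getExtendedAttributes();
    out.writeInt(attrs.size());
    for(Map.Entry<String, Object> e : attrs.entrySet())
        {
        out.writeString(e.getKey());
        writeAttribute(out, e.getValue());               // counterpart of readAttribute
        }
    }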
Example 8: cleanupGenotypeAnnotations
/**
 * Cleans up genotype-level annotations that need to be updated.
 * 1. move MIN_DP to DP if present
 * 2. propagate DP to AD if not present
 * 3. remove SB if present
 * 4. change the PGT value from "0|1" to "1|1" for homozygous variant genotypes
 *
 * @param VC the VariantContext with the Genotypes to fix
 * @param createRefGTs if true we will also create proper hom ref genotypes since we assume the site is monomorphic
 * @return a new set of Genotypes
 */
private List<Genotype> cleanupGenotypeAnnotations(final VariantContext VC, final boolean createRefGTs) {
    final GenotypesContext oldGTs = VC.getGenotypes();
    final List<Genotype> recoveredGs = new ArrayList<>(oldGTs.size());
    for ( final Genotype oldGT : oldGTs ) {
        final Map<String, Object> attrs = new HashMap<>(oldGT.getExtendedAttributes());

        final GenotypeBuilder builder = new GenotypeBuilder(oldGT);
        int depth = oldGT.hasDP() ? oldGT.getDP() : 0;

        // move the MIN_DP to DP
        if ( oldGT.hasExtendedAttribute("MIN_DP") ) {
            depth = Integer.parseInt((String)oldGT.getAnyAttribute("MIN_DP"));
            builder.DP(depth);
            attrs.remove("MIN_DP");
        }

        // remove SB
        attrs.remove("SB");

        // update PGT for hom vars
        if ( oldGT.isHomVar() && oldGT.hasExtendedAttribute(HaplotypeCaller.HAPLOTYPE_CALLER_PHASING_GT_KEY) ) {
            attrs.put(HaplotypeCaller.HAPLOTYPE_CALLER_PHASING_GT_KEY, "1|1");
        }

        // create AD if it's not there
        if ( !oldGT.hasAD() && VC.isVariant() ) {
            final int[] AD = new int[VC.getNAlleles()];
            AD[0] = depth;
            builder.AD(AD);
        }

        if ( createRefGTs ) {
            final int ploidy = oldGT.getPloidy();
            final List<Allele> refAlleles = Collections.nCopies(ploidy,VC.getReference());

            //keep 0 depth samples as no-call
            if (depth > 0) {
                builder.alleles(refAlleles);
            }

            // also, the PLs are technically no longer usable
            builder.noPL();
        }

        recoveredGs.add(builder.noAttributes().attributes(attrs).make());
    }
    return recoveredGs;
}
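The method is private, so it cannot be invoked directly; the standalone sketch below (sample name, attribute keys and values are made up) replays just steps 1 and 3 of the cleanup, the MIN_DP-to-DP move and the SB removal, on a single Genotype:

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class CleanupSketch {
    public static void main(String[] args) {
        final Allele ref = Allele.create("A", true);

        // A genotype carrying MIN_DP and SB, as a GVCF hom-ref block record might.
        final Genotype oldGT = new GenotypeBuilder("sample1", Arrays.asList(ref, ref))
                .DP(30)
                .attribute("MIN_DP", "7")
                .attribute("SB", "10,20,0,0")
                .make();

        final Map<String, Object> attrs = new HashMap<>(oldGT.getExtendedAttributes());
        final GenotypeBuilder builder = new GenotypeBuilder(oldGT);

        int depth = oldGT.hasDP() ? oldGT.getDP() : 0;
        if (oldGT.hasExtendedAttribute("MIN_DP")) {
            depth = Integer.parseInt((String) oldGT.getAnyAttribute("MIN_DP"));
            builder.DP(depth);          // DP becomes the conservative MIN_DP value
            attrs.remove("MIN_DP");
        }
        attrs.remove("SB");             // per-sample strand bias is dropped

        final Genotype cleaned = builder.noAttributes().attributes(attrs).make();
        System.out.println(cleaned.getDP() + " " + cleaned.getExtendedAttributes()); // 7 {}
    }
}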
Example 9: cleanupGenotypeAnnotations
private List<Genotype> cleanupGenotypeAnnotations(final VariantContext VC, final boolean createRefGTs) {
    final GenotypesContext oldGTs = VC.getGenotypes();
    final List<Genotype> recoveredGs = new ArrayList<>(oldGTs.size());
    for (final Genotype oldGT : oldGTs) {
        final Map<String, Object> attrs = new HashMap<>(oldGT.getExtendedAttributes());

        final GenotypeBuilder builder = new GenotypeBuilder(oldGT);
        int depth = oldGT.hasDP() ? oldGT.getDP() : 0;

        // move the MIN_DP to DP
        if (oldGT.hasExtendedAttribute(GaeaVCFConstants.MIN_DP_FORMAT_KEY)) {
            depth = Integer.parseInt((String) oldGT.getAnyAttribute(GaeaVCFConstants.MIN_DP_FORMAT_KEY));
            builder.DP(depth);
            attrs.remove(GaeaVCFConstants.MIN_DP_FORMAT_KEY);
        }

        // move the GQ to RGQ
        if (createRefGTs && oldGT.hasGQ()) {
            builder.noGQ();
            attrs.put(GaeaVCFConstants.REFERENCE_GENOTYPE_QUALITY, oldGT.getGQ());
        }

        // remove SB
        attrs.remove(GaeaVCFConstants.STRAND_BIAS_BY_SAMPLE_KEY);

        // update PGT for hom vars
        if (oldGT.isHomVar() && oldGT.hasExtendedAttribute(GaeaVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY)) {
            attrs.put(GaeaVCFConstants.HAPLOTYPE_CALLER_PHASING_GT_KEY, "1|1");
        }

        // create AD if it's not there
        if (!oldGT.hasAD() && VC.isVariant()) {
            final int[] AD = new int[VC.getNAlleles()];
            AD[0] = depth;
            builder.AD(AD);
        }

        if (createRefGTs) {
            final int ploidy = oldGT.getPloidy();
            final List<Allele> refAlleles = Collections.nCopies(ploidy, VC.getReference());

            // keep 0 depth samples and 0 GQ samples as no-call
            if (depth > 0 && oldGT.hasGQ() && oldGT.getGQ() > 0) {
                builder.alleles(refAlleles);
            }

            // also, the PLs are technically no longer usable
            builder.noPL();
        }

        recoveredGs.add(builder.noAttributes().attributes(attrs).make());
    }
    return recoveredGs;
}
Example 10: mergeRefConfidenceGenotypes
/**
 * Merge into the context a new genotype represented by the given
 * VariantContext for the provided list of target alleles. This method
 * assumes that none of the alleles in the VC overlaps with any of the
 * alleles in the set.
 */
private static void mergeRefConfidenceGenotypes(final GenotypesContext mergedGenotypes, final VariantContext vc,
        final List<Allele> remappedAlleles, final List<Allele> targetAlleles, final boolean samplesAreUniquified,
        final boolean shouldComputePLs) {
    final int maximumPloidy = vc.getMaxPloidy(GaeaGvcfVariantContextUtils.DEFAULT_PLOIDY);
    // the map is different depending on the ploidy, so in order to keep
    // this method flexible (mixed ploidies)
    // we need to get a map done (lazily inside the loop) for each ploidy,
    // up to the maximum possible.
    final int[][] genotypeIndexMapsByPloidy = new int[maximumPloidy + 1][];
    final int maximumAlleleCount = Math.max(remappedAlleles.size(), targetAlleles.size());

    for (final Genotype g : vc.getGenotypes()) {
        final String name;
        if (samplesAreUniquified)
            name = g.getSampleName() + "." + vc.getSource();
        else
            name = g.getSampleName();
        final int ploidy = g.getPloidy();
        final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(g)
                .alleles(GaeaGvcfVariantContextUtils.noCallAlleles(g.getPloidy())).noPL();
        genotypeBuilder.name(name);

        final boolean doPLs = shouldComputePLs && g.hasPL();
        final boolean hasAD = g.hasAD();
        final boolean hasSAC = g.hasExtendedAttribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY);
        if (doPLs || hasSAC || hasAD) {
            final int[] perSampleIndexesOfRelevantAlleles = getIndexesOfRelevantAlleles(remappedAlleles,
                    targetAlleles, vc.getStart(), g);
            if (doPLs) {
                // lazy initialization of the genotype index map by ploidy.
                final int[] genotypeIndexMapByPloidy = genotypeIndexMapsByPloidy[ploidy] == null
                        ? GenotypeLikelihoodCalculators.getInstance(ploidy, maximumAlleleCount).genotypeIndexMap(
                                perSampleIndexesOfRelevantAlleles)
                        : genotypeIndexMapsByPloidy[ploidy];
                final int[] PLs = generatePL(g, genotypeIndexMapByPloidy);
                genotypeBuilder.PL(PLs);
            }
            if (hasAD) {
                genotypeBuilder.AD(generateAD(g.getAD(), perSampleIndexesOfRelevantAlleles));
            }
            if (hasSAC) {
                final List<Integer> sacIndexesToUse = adaptToSACIndexes(perSampleIndexesOfRelevantAlleles);
                final int[] SACs = GaeaGvcfVariantContextUtils.makeNewSACs(g, sacIndexesToUse);
                genotypeBuilder.attribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, SACs);
            }
        }
        mergedGenotypes.add(genotypeBuilder.make());
    }
}
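generateAD is not shown on this page; the sketch below is only a generic illustration of index-based AD remapping (my guess at the shape of such a helper, not its actual implementation, and real code may treat alleles without a counterpart differently, for example by pointing them at the <NON_REF> entry):

import java.util.Arrays;

public class RemapAdSketch {
    // Generic sketch: newAD[i] = oldAD[indexes[i]]. The real generateAD /
    // getIndexesOfRelevantAlleles pair may handle missing alleles differently.
    static int[] remapAD(final int[] oldAD, final int[] indexes) {
        final int[] newAD = new int[indexes.length];
        for (int i = 0; i < newAD.length; i++)
            newAD[i] = oldAD[indexes[i]];
        return newAD;
    }

    public static void main(String[] args) {
        // Old AD over alleles [REF, ALT1, ALT2]; the target site keeps only [REF, ALT2].
        final int[] oldAD = {25, 3, 9};
        final int[] indexes = {0, 2};
        System.out.println(Arrays.toString(remapAD(oldAD, indexes))); // [25, 9]
    }
}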