This page collects typical usage examples of the Java method htsjdk.variant.variantcontext.Genotype.getPloidy. If you are wondering what Genotype.getPloidy does, how to call it, or where to find concrete examples, the curated code samples below may help. You can also browse further usage examples of the enclosing class, htsjdk.variant.variantcontext.Genotype.
The sections below present 15 code examples of Genotype.getPloidy, sorted by popularity by default.
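Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the sample name and alleles are made up) showing what Genotype.getPloidy reports: it is simply the number of allele calls carried by the genotype.

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import java.util.Arrays;

public class GetPloidyDemo {
    public static void main(String[] args) {
        final Allele ref = Allele.create("A", true);   // reference allele
        final Allele alt = Allele.create("T", false);  // alternate allele
        // a diploid genotype: two allele calls
        final Genotype g = GenotypeBuilder.create("sample1", Arrays.asList(ref, alt));
        System.out.println(g.getPloidy()); // prints 2
    }
}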
Example 1: add
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Add the information from this Genotype to this band
* @param pos the position being added; must be contiguous with the band's current stop
* @param g a non-null Genotype with GQ and PL attributes (DP is used when present)
*/
public void add(final int pos, final Genotype g) {
if ( g == null ) throw new IllegalArgumentException("g cannot be null");
if ( ! g.hasGQ() ) throw new IllegalArgumentException("g must have GQ field");
if ( ! g.hasPL() ) throw new IllegalArgumentException("g must have PL field");
if ( pos != stop + 1 ) throw new IllegalArgumentException("adding genotype at pos " + pos + " isn't contiguous with previous stop " + stop);
if ( g.getPloidy() != ploidy)
throw new IllegalArgumentException("cannot add a genotype with a different ploidy: " + g.getPloidy() + " != " + ploidy);
if( minPLs == null )
minPLs = g.getPL();
else { // otherwise take the min with the provided genotype's PLs
final int[] PL = g.getPL();
if (PL.length != minPLs.length)
throw new IllegalStateException("trying to merge different PL array sizes: " + PL.length + " != " + minPLs.length);
for (int i = 0; i < PL.length; i++)
if (minPLs[i] > PL[i])
minPLs[i] = PL[i];
}
stop = pos;
GQs.add(Math.min(g.getGQ(), 99)); // cap GQ at the maximum emitted value of 99
DPs.add(Math.max(g.getDP(),0));
}
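For clarity, the element-wise min-PL merge performed by add() can be sketched on plain arrays (a standalone illustration with made-up numbers, not part of the original class):

// keep the per-index minimum of the band's PLs and the incoming genotype's PLs
final int[] minPLs = {0, 60, 900};
final int[] nextPLs = {0, 45, 990};
for (int i = 0; i < minPLs.length; i++) {
    if (minPLs[i] > nextPLs[i]) {
        minPLs[i] = nextPLs[i];
    }
}
// minPLs is now {0, 45, 900}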
Example 2: reduceScopeCalculateLikelihoodSums
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
@Override
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
final int numOriginalAltAlleles = likelihoodSums.length;
final GenotypesContext genotypes = vc.getGenotypes();
for (final Genotype genotype : genotypes.iterateInSampleNameOrder()) {
if (!genotype.hasPL())
continue;
final double[] gls = genotype.getLikelihoods().getAsVector();
if (MathUtils.sum(gls) >= GATKVariantContextUtils.SUM_GL_THRESH_NOCALL)
continue;
final int PLindexOfBestGL = MathUtils.maxElementIndex(gls);
final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
final int declaredPloidy = genotype.getPloidy();
final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
// by convention, first count coming from getAlleleCountFromPLIndex comes from reference allele
for (int k = 1; k < acCount.length; k++)
if (acCount[k] > 0)
likelihoodSums[k - 1].sum += acCount[k] * bestToHomRefDiffGL;
}
}
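The per-genotype contribution computed above amounts to "best GL minus hom-ref GL, weighted by the number of alt-allele copies in the best genotype"; here is a plain-Java sketch of that arithmetic with made-up values (an illustration only, not the GATK MathUtils helpers):

final double[] gls = {-10.0, -0.5, -6.0}; // log10 likelihoods for hom-ref, het, hom-var
int best = 0;
for (int i = 1; i < gls.length; i++) {
    if (gls[i] > gls[best]) best = i;
}
final double bestToHomRefDiffGL = best == 0 ? 0.0 : gls[best] - gls[0];
// best == 1 (the het), so the single alt copy contributes 1 * (-0.5 - (-10.0)) = 9.5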
Example 3: getInstance
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Returns an AF calculator capable of handling a particular variant context.
* @param variantContext the target variant context.
* @param defaultPloidy the ploidy to assume when there is no GT call present to determine it.
* @return never {@code null}
*/
public AFCalculator getInstance(final VariantContext variantContext, final int defaultPloidy, final int maximumAltAlleles) {
if (variantContext == null)
throw new IllegalArgumentException("variant context cannot be null");
final int sampleCount = variantContext.getNSamples();
if (sampleCount == 0)
return getInstance(defaultPloidy,maximumAltAlleles);
final GenotypesContext genotypes = variantContext.getGenotypes();
final Genotype firstGenotype = genotypes.get(0);
int ploidy = firstGenotype.getPloidy();
if (ploidy <= 0) ploidy = defaultPloidy;
for (int i = 1 ; i < sampleCount; i++) {
final Genotype genotype = genotypes.get(i);
final int declaredPloidy = genotype.getPloidy();
final int actualPloidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
if (actualPloidy != ploidy) {
ploidy = AFCalculatorImplementation.UNBOUND_PLOIDY;
break;
}
}
return getInstance(ploidy,Math.min(variantContext.getNAlleles() - 1, maximumAltAlleles));
}
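The consensus-ploidy decision in the loop above can be sketched in isolation (plain Java with made-up ploidies; -1 stands in for AFCalculatorImplementation.UNBOUND_PLOIDY):

final int[] declaredPloidies = {2, 2, 0, 2}; // 0 means the sample has no GT call
final int defaultPloidy = 2;
int consensus = declaredPloidies[0] <= 0 ? defaultPloidy : declaredPloidies[0];
for (int i = 1; i < declaredPloidies.length; i++) {
    final int actual = declaredPloidies[i] <= 0 ? defaultPloidy : declaredPloidies[i];
    if (actual != consensus) {
        consensus = -1; // mixed ploidies: fall back to the unbound calculator
        break;
    }
}
// consensus == 2 here: the no-call sample falls back to defaultPloidy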
Example 4: reduceScopeCalculateLikelihoodSums
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
@Override
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
final int numOriginalAltAlleles = likelihoodSums.length;
final GenotypesContext genotypes = vc.getGenotypes();
for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) {
if (!genotype.hasPL())
continue;
final double[] gls = genotype.getLikelihoods().getAsVector();
if (MathUtils.sum(gls) >= GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
continue;
final int PLindexOfBestGL = MathUtils.maxElementIndex(gls);
final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
final int declaredPloidy = genotype.getPloidy();
final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
// by convention, first count coming from getAlleleCountFromPLIndex comes from reference allele
for (int k=1; k < acCount.length;k++)
if (acCount[k] > 0 )
likelihoodSums[k-1].sum += acCount[k] * bestToHomRefDiffGL;
}
}
Example 5: reduceScopeCalculateLikelihoodSums
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
@Override
@Requires("vc != null && likelihoodSums != null")
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
final int numOriginalAltAlleles = likelihoodSums.length;
final GenotypesContext genotypes = vc.getGenotypes();
for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) {
if (!genotype.hasPL())
continue;
final double[] gls = genotype.getLikelihoods().getAsVector();
if (GvcfMathUtils.sum(gls) >= GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
continue;
final int PLindexOfBestGL = GvcfMathUtils.maxElementIndex(gls);
final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
final int declaredPloidy = genotype.getPloidy();
final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
// by convention, first count coming from getAlleleCountFromPLIndex comes from reference allele
for (int k=1; k < acCount.length;k++)
if (acCount[k] > 0 )
likelihoodSums[k-1].sum += acCount[k] * bestToHomRefDiffGL;
}
}
Example 6: totalPloidy
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Calculates the total ploidy of a variant context as the sum of the ploidies across all genotypes.
* @param vc the target variant context.
* @param defaultPloidy the default ploidy to assume when there is no ploidy information for a genotype.
* @return the total ploidy summed over all genotypes.
*/
public static int totalPloidy(final VariantContext vc, final int defaultPloidy) {
if (vc == null)
throw new IllegalArgumentException("the vc provided cannot be null");
if (defaultPloidy < 0)
throw new IllegalArgumentException("the default ploidy must 0 or greater");
int result = 0;
for (final Genotype genotype : vc.getGenotypes()) {
final int declaredPloidy = genotype.getPloidy();
result += declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
}
return result;
}
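A small sketch of the defaultPloidy fallback used above, built with the htsjdk GenotypeBuilder API (sample names and allele are made up; assumes the Allele, Genotype, GenotypeBuilder, Arrays and Collections imports):

final Allele ref = Allele.create("G", true);
// a called diploid genotype and a genotype with no allele calls (ploidy 0)
final Genotype called = GenotypeBuilder.create("s1", Arrays.asList(ref, ref));
final Genotype noCall = GenotypeBuilder.create("s2", Collections.<Allele>emptyList());
final int defaultPloidy = 2;
int total = 0;
for (final Genotype g : Arrays.asList(called, noCall)) {
    final int declared = g.getPloidy();
    total += declared <= 0 ? defaultPloidy : declared;
}
// total == 4: the no-call genotype contributes defaultPloidy instead of 0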
Example 7: subsetToRefOnly
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Subset the samples in vc down to reference-only information, using ref-call alleles
*
* Preserves DP and GQ if present
*
* @param vc the variant context to subset down to
* @param ploidy ploidy to use if a genotype doesn't have any alleles
* @return a GenotypesContext
*/
public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) {
if ( vc == null ) throw new IllegalArgumentException("vc cannot be null");
if ( ploidy < 1 ) throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy);
// the genotypes with PLs
final GenotypesContext oldGTs = vc.getGenotypes();
// optimization: if no input genotypes, just exit
if (oldGTs.isEmpty()) return oldGTs;
// the new genotypes to create
final GenotypesContext newGTs = GenotypesContext.create(oldGTs.size());
final Allele ref = vc.getReference();
final List<Allele> diploidRefAlleles = Arrays.asList(ref, ref);
// create the new genotypes
for ( final Genotype g : vc.getGenotypes() ) {
final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy();
final List<Allele> refAlleles = gPloidy == 2 ? diploidRefAlleles : Collections.nCopies(gPloidy, ref);
final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles);
if ( g.hasDP() ) gb.DP(g.getDP());
if ( g.hasGQ() ) gb.GQ(g.getGQ());
newGTs.add(gb.make());
}
return newGTs;
}
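The ref-only allele list built per genotype is simply ploidy copies of the reference allele; a one-off sketch with a made-up allele and sample name (assumes the same htsjdk and java.util imports):

final Allele ref = Allele.create("C", true);
final List<Allele> refAlleles = Collections.nCopies(3, ref); // three copies for a ploidy-3 genotype
final Genotype refOnly = GenotypeBuilder.create("sampleX", refAlleles);
// refOnly.getPloidy() == 3 and every call is the reference allele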
Example 8: mergeRefConfidenceGenotypes
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Merge into the context a new genotype represented by the given VariantContext for the provided list of target alleles.
* This method assumes that none of the alleles in the VC overlaps with any of the alleles in the set.
*
* @param mergedGenotypes the genotypes context to add to
* @param VC the Variant Context for the sample
* @param remappedAlleles the list of remapped alleles for the sample
* @param targetAlleles the list of target alleles
*/
private static void mergeRefConfidenceGenotypes(final GenotypesContext mergedGenotypes,
final VariantContext VC,
final List<Allele> remappedAlleles,
final List<Allele> targetAlleles) {
final int maximumPloidy = VC.getMaxPloidy(GATKVariantContextUtils.DEFAULT_PLOIDY);
// the map differs by ploidy, so to keep this method flexible (mixed ploidies)
// we build a map lazily inside the loop for each ploidy, up to the maximum possible.
final int[][] genotypeIndexMapsByPloidy = new int[maximumPloidy + 1][];
final int maximumAlleleCount = Math.max(remappedAlleles.size(),targetAlleles.size());
final int[] indexesOfRelevantAlleles = getIndexesOfRelevantAlleles(remappedAlleles, targetAlleles, VC.getStart());
for ( final Genotype g : VC.getGenotypes() ) {
final String name = g.getSampleName();
if ( mergedGenotypes.containsSample(name) )
continue;
final int ploidy = g.getPloidy();
final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(g).alleles(GATKVariantContextUtils.noCallAlleles(g.getPloidy()));
if (g.hasPL()) {
// lazy initialization of the genotype index map by ploidy.
final int[] genotypeIndexMapByPloidy = genotypeIndexMapsByPloidy[ploidy] == null
? GenotypeLikelihoodCalculators.getInstance(ploidy, maximumAlleleCount).genotypeIndexMap(indexesOfRelevantAlleles)
: genotypeIndexMapsByPloidy[ploidy];
final int[] PLs = generatePL(g, genotypeIndexMapByPloidy);
final int[] AD = g.hasAD() ? generateAD(g.getAD(), indexesOfRelevantAlleles) : null;
genotypeBuilder.PL(PLs).AD(AD).noGQ();
}
mergedGenotypes.add(genotypeBuilder.make());
}
}
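The genotype index map produced above is used by generatePL() to reorder a sample's PLs into the target allele space; one plausible reading of that remap, shown with plain arrays and made-up values (an illustration only, not the GATK helpers themselves):

// oldPLs are indexed by the sample's original genotype ordering;
// indexMap[i] names the old genotype index that target genotype index i should read from
final int[] oldPLs = {0, 30, 300};
final int[] indexMap = {0, 1, 2, 1, 2, 2};
final int[] newPLs = new int[indexMap.length];
for (int i = 0; i < indexMap.length; i++) {
    newPLs[i] = oldPLs[indexMap[i]];
}
// newPLs == {0, 30, 300, 30, 300, 300}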
Example 9: combineSinglePools
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Simple non-optimized version that combines GLs from several pools and produces a global AF distribution.
*
* @param GLs the input genotypes context with per-pool GLs
* @param defaultPloidy the ploidy to assume for genotypes that do not declare one
* @param numAlleles the total number of alleles (reference included)
* @param log10AlleleFrequencyPriors the log10 allele-frequency priors
*/
protected void combineSinglePools(final GenotypesContext GLs,
final int defaultPloidy,
final int numAlleles,
final double[] log10AlleleFrequencyPriors) {
// Combine each pool incrementally - likelihoods will be renormalized at each step
// first element: zero ploidy, i.e. the trivial degenerate distribution
final int numAltAlleles = numAlleles - 1;
final int[] zeroCounts = new int[numAlleles];
final ExactACset set = new ExactACset(1, new ExactACcounts(zeroCounts));
set.getLog10Likelihoods()[0] = 0.0;
final StateTracker stateTracker = getStateTracker(false, numAltAlleles);
int combinedPloidy = 0;
CombinedPoolLikelihoods combinedPoolLikelihoods = new CombinedPoolLikelihoods();
combinedPoolLikelihoods.add(set);
for (final Genotype genotype : GLs.iterateInSampleNameOrder()) {
// recover gls and check if they qualify.
if (!genotype.hasPL())
continue;
final double[] gls = genotype.getLikelihoods().getAsVector();
if (MathUtils.sum(gls) >= GATKVariantContextUtils.SUM_GL_THRESH_NOCALL)
continue;
stateTracker.reset();
final int declaredPloidy = genotype.getPloidy();
final int ploidy = declaredPloidy < 1 ? defaultPloidy : declaredPloidy;
// they do qualify so we proceed.
combinedPoolLikelihoods = fastCombineMultiallelicPool(combinedPoolLikelihoods, gls,
combinedPloidy, ploidy, numAlleles, log10AlleleFrequencyPriors, stateTracker);
combinedPloidy = ploidy + combinedPloidy; // total number of chromosomes in combinedLikelihoods
}
if (combinedPloidy == 0)
stateTracker.setLog10LikelihoodOfAFzero(0.0);
}
Example 10: combineSinglePools
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Simple non-optimized version that combines GLs from several pools and produces a global AF distribution.
* @param GLs the input genotypes context with per-pool GLs
* @param defaultPloidy the ploidy to assume for genotypes that do not declare one
* @param numAlleles the total number of alleles (reference included)
* @param log10AlleleFrequencyPriors the log10 allele-frequency priors
*/
protected void combineSinglePools(final GenotypesContext GLs,
final int defaultPloidy,
final int numAlleles,
final double[] log10AlleleFrequencyPriors) {
// Combine each pool incrementally - likelihoods will be renormalized at each step
// first element: zero ploidy, i.e. the trivial degenerate distribution
final int numAltAlleles = numAlleles - 1;
final int[] zeroCounts = new int[numAlleles];
final ExactACset set = new ExactACset(1, new ExactACcounts(zeroCounts));
set.getLog10Likelihoods()[0] = 0.0;
final StateTracker stateTracker = getStateTracker(false,numAltAlleles);
int combinedPloidy = 0;
CombinedPoolLikelihoods combinedPoolLikelihoods = new CombinedPoolLikelihoods();
combinedPoolLikelihoods.add(set);
for (final Genotype genotype : GLs.iterateInSampleNameOrder()) {
// recover gls and check if they qualify.
if (!genotype.hasPL())
continue;
final double[] gls = genotype.getLikelihoods().getAsVector();
if (MathUtils.sum(gls) >= GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
continue;
stateTracker.reset();
final int declaredPloidy = genotype.getPloidy();
final int ploidy = declaredPloidy < 1 ? defaultPloidy : declaredPloidy;
// they do qualify so we proceed.
combinedPoolLikelihoods = fastCombineMultiallelicPool(combinedPoolLikelihoods, gls,
combinedPloidy, ploidy, numAlleles, log10AlleleFrequencyPriors, stateTracker);
combinedPloidy = ploidy + combinedPloidy; // total number of chromosomes in combinedLikelihoods
}
if (combinedPloidy == 0)
stateTracker.setLog10LikelihoodOfAFzero(0.0);
}
Example 11: subsetGenotypeAlleles
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* From a given genotype, extract a given subset of alleles and update genotype PLs and SACs.
* @param g genotype to subset
* @param allelesToUse alleles to subset
* @param vc variant context with alleles and genotypes
* @param defaultPloidy ploidy to assume when {@code vc} does not contain that information for a sample.
* @param assignGenotypes true: assign hard genotypes, false: leave as no-call
* @return a Genotype with new PLs and SACs
*/
private Genotype subsetGenotypeAlleles(final Genotype g, final List<Allele> allelesToUse, final VariantContext vc, final int defaultPloidy,
boolean assignGenotypes) {
final int ploidy = g.getPloidy() <= 0 ? defaultPloidy : g.getPloidy();
if (!g.hasLikelihoods())
return GenotypeBuilder.create(g.getSampleName(),GaeaGvcfVariantContextUtils.noCallAlleles(ploidy));
else {
// subset likelihood alleles
final double[] newLikelihoods = subsetLikelihoodAlleles(g, allelesToUse, vc, ploidy);
if (MathUtils.sum(newLikelihoods) > GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
return GenotypeBuilder.create(g.getSampleName(), GaeaGvcfVariantContextUtils.noCallAlleles(ploidy));
else // just now we would care about newSACs
return subsetGenotypeAllelesWithLikelihoods(g, allelesToUse, vc, ploidy, assignGenotypes, newLikelihoods);
}
}
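When the likelihood sum indicates a no-call, the method returns a genotype with ploidy no-call alleles; building such a genotype with htsjdk directly looks like this (a sketch using Allele.NO_CALL rather than the project's noCallAlleles helper; the sample name is made up):

final List<Allele> noCallAlleles = Collections.nCopies(2, Allele.NO_CALL);
final Genotype noCallGT = GenotypeBuilder.create("sampleY", noCallAlleles);
// noCallGT.getPloidy() == 2 and noCallGT.isNoCall() == true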
Example 12: totalPloidy
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Calculates the total ploidy of a variant context as the sum of all
* ploidies across genotypes.
*
* @param vc
* the target variant context.
* @param defaultPloidy
* the default ploidy to assume when there is no ploidy
* information for a genotype.
* @return the total ploidy summed over all genotypes.
*/
public static int totalPloidy(final VariantContext vc, final int defaultPloidy) {
if (vc == null)
throw new IllegalArgumentException("the vc provided cannot be null");
if (defaultPloidy < 0)
throw new IllegalArgumentException("the default ploidy must 0 or greater");
int result = 0;
for (final Genotype genotype : vc.getGenotypes()) {
final int declaredPloidy = genotype.getPloidy();
result += declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;
}
return result;
}
Example 13: subsetToRefOnly
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Subset the samples in vc down to reference-only information, using ref-call
* alleles
*
* Preserves DP and GQ if present
*
* @param vc
* the variant context to subset down to
* @param ploidy
* ploidy to use if a genotype doesn't have any alleles
* @return a GenotypesContext
*/
public static GenotypesContext subsetToRefOnly(final VariantContext vc, final int ploidy) {
if (vc == null)
throw new IllegalArgumentException("vc cannot be null");
if (ploidy < 1)
throw new IllegalArgumentException("ploidy must be >= 1 but got " + ploidy);
// the genotypes with PLs
final GenotypesContext oldGTs = vc.getGenotypes();
// optimization: if no input genotypes, just exit
if (oldGTs.isEmpty())
return oldGTs;
// the new genotypes to create
final GenotypesContext newGTs = GenotypesContext.create(oldGTs.size());
final Allele ref = vc.getReference();
final List<Allele> diploidRefAlleles = Arrays.asList(ref, ref);
// create the new genotypes
for (final Genotype g : vc.getGenotypes()) {
final int gPloidy = g.getPloidy() == 0 ? ploidy : g.getPloidy();
final List<Allele> refAlleles = gPloidy == 2 ? diploidRefAlleles : Collections.nCopies(gPloidy, ref);
final GenotypeBuilder gb = new GenotypeBuilder(g.getSampleName(), refAlleles);
if (g.hasDP())
gb.DP(g.getDP());
if (g.hasGQ())
gb.GQ(g.getGQ());
newGTs.add(gb.make());
}
return newGTs;
}
Example 14: genotypeCanBeMergedInCurrentBlock
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
private boolean genotypeCanBeMergedInCurrentBlock(final Genotype g) {
return currentBlock != null && currentBlock.withinBounds(g.getGQ()) && currentBlock.getPloidy() == g.getPloidy()
&& (currentBlock.getMinPLs() == null || !g.hasPL() || (currentBlock.getMinPLs().length == g.getPL().length));
}
Example 15: cleanupGenotypeAnnotations
import htsjdk.variant.variantcontext.Genotype; // import the package/class this method depends on
/**
* Cleans up genotype-level annotations that need to be updated.
* 1. move MIN_DP to DP if present
* 2. propagate DP to AD if not present
* 3. remove SB if present
* 4. change the PGT value from "0|1" to "1|1" for homozygous variant genotypes
*
* @param VC the VariantContext with the Genotypes to fix
* @param createRefGTs if true we will also create proper hom ref genotypes since we assume the site is monomorphic
* @return a new set of Genotypes
*/
private List<Genotype> cleanupGenotypeAnnotations(final VariantContext VC, final boolean createRefGTs) {
final GenotypesContext oldGTs = VC.getGenotypes();
final List<Genotype> recoveredGs = new ArrayList<>(oldGTs.size());
for ( final Genotype oldGT : oldGTs ) {
final Map<String, Object> attrs = new HashMap<>(oldGT.getExtendedAttributes());
final GenotypeBuilder builder = new GenotypeBuilder(oldGT);
int depth = oldGT.hasDP() ? oldGT.getDP() : 0;
// move the MIN_DP to DP
if ( oldGT.hasExtendedAttribute("MIN_DP") ) {
depth = Integer.parseInt((String)oldGT.getAnyAttribute("MIN_DP"));
builder.DP(depth);
attrs.remove("MIN_DP");
}
// remove SB
attrs.remove("SB");
// update PGT for hom vars
if ( oldGT.isHomVar() && oldGT.hasExtendedAttribute(HaplotypeCaller.HAPLOTYPE_CALLER_PHASING_GT_KEY) ) {
attrs.put(HaplotypeCaller.HAPLOTYPE_CALLER_PHASING_GT_KEY, "1|1");
}
// create AD if it's not there
if ( !oldGT.hasAD() && VC.isVariant() ) {
final int[] AD = new int[VC.getNAlleles()];
AD[0] = depth;
builder.AD(AD);
}
if ( createRefGTs ) {
final int ploidy = oldGT.getPloidy();
final List<Allele> refAlleles = Collections.nCopies(ploidy,VC.getReference());
//keep 0 depth samples as no-call
if (depth > 0) {
builder.alleles(refAlleles);
}
// also, the PLs are technically no longer usable
builder.noPL();
}
recoveredGs.add(builder.noAttributes().attributes(attrs).make());
}
return recoveredGs;
}
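As a standalone sketch of step 1 above (moving MIN_DP into DP), assuming MIN_DP is stored as a String attribute as the cast in the code suggests (sample name and value are made up; assumes the usual htsjdk imports):

final Allele ref = Allele.create("A", true);
final Genotype g = new GenotypeBuilder("sample", Arrays.asList(ref, ref))
        .attribute("MIN_DP", "7")
        .make();
final GenotypeBuilder fixed = new GenotypeBuilder(g);
if (g.hasExtendedAttribute("MIN_DP")) {
    fixed.DP(Integer.parseInt((String) g.getAnyAttribute("MIN_DP")));
}
final Genotype cleaned = fixed.make(); // cleaned.getDP() == 7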