

Java Genotype.hasPL Method Code Examples

This article collects typical usage examples of the Java method htsjdk.variant.variantcontext.Genotype.hasPL. If you are wondering what Genotype.hasPL does, how to call it, or what real-world uses of it look like, the curated code examples below may help. You can also explore further usage examples of the enclosing class, htsjdk.variant.variantcontext.Genotype.


The sections below present 10 code examples of the Genotype.hasPL method, sorted by popularity by default.
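As a quick orientation before the examples, here is a minimal, self-contained sketch of the usual pattern around this method: guard any access to getPL() with hasPL(). The sample names, alleles, and PL values are invented purely for illustration.

import java.util.Arrays;

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;

public class HasPLDemo {
    public static void main(String[] args) {
        final Allele ref = Allele.create("A", true);
        final Allele alt = Allele.create("T", false);

        // A genotype carrying Phred-scaled likelihoods (PL) for the A/A, A/T, T/T genotypes.
        final Genotype withPL = new GenotypeBuilder("sample1", Arrays.asList(ref, alt))
                .PL(new int[]{40, 0, 60})
                .GQ(40)
                .make();

        // A genotype without PLs, e.g. a no-call.
        final Genotype withoutPL = new GenotypeBuilder("sample2",
                Arrays.asList(Allele.NO_CALL, Allele.NO_CALL)).make();

        for (final Genotype g : Arrays.asList(withPL, withoutPL)) {
            if (g.hasPL()) {
                System.out.println(g.getSampleName() + " PL=" + Arrays.toString(g.getPL()));
            } else {
                System.out.println(g.getSampleName() + " has no PL field");
            }
        }
    }
}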

Example 1: add

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
/**
 * Add information from this Genotype to this band
 * @param pos the genomic position being added; must be contiguous with the band's current stop
 * @param g a non-null Genotype with GQ and PL attributes
 */
public void add(final int pos, final Genotype g) {
    if ( g == null ) throw new IllegalArgumentException("g cannot be null");
    if ( ! g.hasGQ() ) throw new IllegalArgumentException("g must have GQ field");
    if ( ! g.hasPL() ) throw new IllegalArgumentException("g must have PL field");
    if ( pos != stop + 1 ) throw new IllegalArgumentException("adding genotype at pos " + pos + " isn't contiguous with previous stop " + stop);
    if ( g.getPloidy() != ploidy)
        throw new IllegalArgumentException("cannot add a genotype with a different ploidy: " + g.getPloidy() + " != " + ploidy);

    if( minPLs == null )
        minPLs = g.getPL();
    else { // otherwise take the min with the provided genotype's PLs
        final int[] PL = g.getPL();
        if (PL.length != minPLs.length)
            throw new IllegalStateException("trying to merge different PL array sizes: " + PL.length + " != " + minPLs.length);
        for (int i = 0; i < PL.length; i++)
            if (minPLs[i] > PL[i])
                minPLs[i] = PL[i];
    }
    stop = pos;
    GQs.add(Math.min(g.getGQ(), 99)); // cap the emitted GQ at a maximum of 99
    DPs.add(Math.max(g.getDP(),0));
}
 
Developer: PAA-NCIC, Project: SparkSeq, Lines of code: 27, Source: HomRefBlock.java
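The band keeps the element-wise minimum over the PL arrays of every genotype it absorbs. The following standalone sketch isolates that merging step; the helper name and arrays are hypothetical and not part of HomRefBlock.

// Hypothetical helper illustrating the PL-merging step above: keep the element-wise
// minimum of two equally sized Phred-scaled likelihood arrays.
static int[] elementWiseMinPL(final int[] currentMin, final int[] incoming) {
    if (currentMin == null)
        return incoming.clone();                       // first genotype seen in the band
    if (incoming.length != currentMin.length)
        throw new IllegalStateException("trying to merge different PL array sizes: "
                + incoming.length + " != " + currentMin.length);
    final int[] result = currentMin.clone();
    for (int i = 0; i < incoming.length; i++)
        result[i] = Math.min(result[i], incoming[i]);
    return result;
}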

Example 2: reduceScopeCalculateLikelihoodSums

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
@Override
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
    final int numOriginalAltAlleles = likelihoodSums.length;
    final GenotypesContext genotypes = vc.getGenotypes();
    for (final Genotype genotype : genotypes.iterateInSampleNameOrder()) {
        if (!genotype.hasPL())
            continue;
        final double[] gls = genotype.getLikelihoods().getAsVector();
        if (MathUtils.sum(gls) >= GATKVariantContextUtils.SUM_GL_THRESH_NOCALL)
            continue;

        final int PLindexOfBestGL = MathUtils.maxElementIndex(gls);

        final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
        final int declaredPloidy = genotype.getPloidy();
        final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;

        final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
        // by convention, the first count returned by getAlleleCountFromPLIndex corresponds to the reference allele
        for (int k = 1; k < acCount.length; k++)
            if (acCount[k] > 0)
                likelihoodSums[k - 1].sum += acCount[k] * bestToHomRefDiffGL;
    }
}
 
Developer: PAA-NCIC, Project: SparkSeq, Lines of code: 25, Source: GeneralPloidyExactAFCalculator.java
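The arithmetic at the heart of this loop is: find the index of the largest genotype likelihood, then measure how much better it is than the hom-ref likelihood, which by convention sits at index 0. A plain-Java sketch of that step follows; the helper names are illustrative stand-ins, not the GATK MathUtils methods themselves.

// Illustrative stand-in for MathUtils.maxElementIndex: index of the largest element.
static int maxElementIndex(final double[] values) {
    int best = 0;
    for (int i = 1; i < values.length; i++)
        if (values[i] > values[best])
            best = i;
    return best;
}

// Difference between the best log10 likelihood and the hom-ref likelihood (index 0);
// zero when hom-ref itself is the best genotype.
static double bestToHomRefDiff(final double[] log10Likelihoods) {
    final int PL_INDEX_OF_HOM_REF = 0;
    final int bestIndex = maxElementIndex(log10Likelihoods);
    return bestIndex == PL_INDEX_OF_HOM_REF
            ? 0.0
            : log10Likelihoods[bestIndex] - log10Likelihoods[PL_INDEX_OF_HOM_REF];
}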

Example 3: reduceScopeCalculateLikelihoodSums

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
@Override
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
    final int numOriginalAltAlleles = likelihoodSums.length;
    final GenotypesContext genotypes = vc.getGenotypes();
    for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) {
        if (!genotype.hasPL())
            continue;
        final double[] gls = genotype.getLikelihoods().getAsVector();
        if (MathUtils.sum(gls) >= GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
            continue;

        final int PLindexOfBestGL = MathUtils.maxElementIndex(gls);

        final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
        final int declaredPloidy = genotype.getPloidy();
        final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;

        final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
        // by convention, the first count returned by getAlleleCountFromPLIndex corresponds to the reference allele
        for (int k=1; k < acCount.length;k++)
            if (acCount[k] > 0 )
                likelihoodSums[k-1].sum += acCount[k] * bestToHomRefDiffGL;
    }
}
 
Developer: BGI-flexlab, Project: SOAPgaea, Lines of code: 25, Source: GeneralPloidyExactAFCalculator.java

Example 4: reduceScopeCalculateLikelihoodSums

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
@Override
@Requires("vc != null && likelihoodSums != null")
protected void reduceScopeCalculateLikelihoodSums(final VariantContext vc, final int defaultPloidy, final LikelihoodSum[] likelihoodSums) {
    final int numOriginalAltAlleles = likelihoodSums.length;
    final GenotypesContext genotypes = vc.getGenotypes();
    for ( final Genotype genotype : genotypes.iterateInSampleNameOrder() ) {
        if (!genotype.hasPL())
            continue;
        final double[] gls = genotype.getLikelihoods().getAsVector();
        if (GvcfMathUtils.sum(gls) >= GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
            continue;

        final int PLindexOfBestGL = GvcfMathUtils.maxElementIndex(gls);

        final double bestToHomRefDiffGL = PLindexOfBestGL == PL_INDEX_OF_HOM_REF ? 0.0 : gls[PLindexOfBestGL] - gls[PL_INDEX_OF_HOM_REF];
        final int declaredPloidy = genotype.getPloidy();
        final int ploidy = declaredPloidy <= 0 ? defaultPloidy : declaredPloidy;

        final int[] acCount = GeneralPloidyGenotypeLikelihoods.getAlleleCountFromPLIndex(1 + numOriginalAltAlleles, ploidy, PLindexOfBestGL);
        // by convention, the first count returned by getAlleleCountFromPLIndex corresponds to the reference allele
        for (int k=1; k < acCount.length;k++)
            if (acCount[k] > 0 )
                likelihoodSums[k-1].sum += acCount[k] * bestToHomRefDiffGL;
    }
}
 
Developer: BGI-flexlab, Project: SOAPgaea, Lines of code: 26, Source: IndependentAllelesExactAFCalculator.java

Example 5: mergeRefConfidenceGenotypes

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
/**
 * Merge into the context a new genotype represented by the given VariantContext for the provided list of target alleles.
 * This method assumes that none of the alleles in the VC overlaps with any of the alleles in the set.
 *
 * @param mergedGenotypes   the genotypes context to add to
 * @param VC                the Variant Context for the sample
 * @param remappedAlleles   the list of remapped alleles for the sample
 * @param targetAlleles     the list of target alleles
 */
private static void mergeRefConfidenceGenotypes(final GenotypesContext mergedGenotypes,
                                                final VariantContext VC,
                                                final List<Allele> remappedAlleles,
                                                final List<Allele> targetAlleles) {
    final int maximumPloidy = VC.getMaxPloidy(GATKVariantContextUtils.DEFAULT_PLOIDY);
    // the map is different depending on the ploidy, so in order to keep this method flexible (mixed ploidies)
    // we need to get a map done (lazily inside the loop) for each ploidy, up to the maximum possible.
    final int[][] genotypeIndexMapsByPloidy = new int[maximumPloidy + 1][];
    final int maximumAlleleCount = Math.max(remappedAlleles.size(),targetAlleles.size());
    final int[] indexesOfRelevantAlleles = getIndexesOfRelevantAlleles(remappedAlleles, targetAlleles, VC.getStart());

    for ( final Genotype g : VC.getGenotypes() ) {
        final String name = g.getSampleName();
        if ( mergedGenotypes.containsSample(name) )
            continue;
        final int ploidy = g.getPloidy();
        final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(g).alleles(GATKVariantContextUtils.noCallAlleles(g.getPloidy()));
        if (g.hasPL()) {
            // lazy initialization of the genotype index map by ploidy.
            final int[] genotypeIndexMapByPloidy = genotypeIndexMapsByPloidy[ploidy] == null
                    ? GenotypeLikelihoodCalculators.getInstance(ploidy, maximumAlleleCount).genotypeIndexMap(indexesOfRelevantAlleles)
                    : genotypeIndexMapsByPloidy[ploidy];
            final int[] PLs = generatePL(g, genotypeIndexMapByPloidy);
            final int[] AD = g.hasAD() ? generateAD(g.getAD(), indexesOfRelevantAlleles) : null;
            genotypeBuilder.PL(PLs).AD(AD).noGQ();
        }
        mergedGenotypes.add(genotypeBuilder.make());
    }
}
 
Developer: PAA-NCIC, Project: SparkSeq, Lines of code: 39, Source: ReferenceConfidenceVariantContextMerger.java
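Because the genotype index map depends only on the ploidy and is expensive to build, the example computes it lazily and caches one map per ploidy in an array. The same caching pattern, stripped of the GATK-specific calls, is sketched below; the buildMap function is a hypothetical placeholder for the genotype-index-map factory.

import java.util.function.IntFunction;

// Lazily compute and cache one value per ploidy, up to a known maximum ploidy.
final class PerPloidyCache<T> {
    private final Object[] cache;
    private final IntFunction<T> buildMap;   // hypothetical builder, e.g. a genotype index map factory

    PerPloidyCache(final int maximumPloidy, final IntFunction<T> buildMap) {
        this.cache = new Object[maximumPloidy + 1];
        this.buildMap = buildMap;
    }

    @SuppressWarnings("unchecked")
    T get(final int ploidy) {
        if (cache[ploidy] == null)
            cache[ploidy] = buildMap.apply(ploidy);   // built on first use only
        return (T) cache[ploidy];
    }
}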

Example 6: combineSinglePools

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
/**
 * Simple non-optimized version that combines GLs from several pools and produces global AF distribution.
 *
 * @param GLs                        Input genotypes context with per-pool GLs
 * @param defaultPloidy              Ploidy assumed for genotypes that do not declare one
 * @param numAlleles                 Total number of alleles (reference included)
 * @param log10AlleleFrequencyPriors Frequency priors
 */
protected void combineSinglePools(final GenotypesContext GLs,
                                  final int defaultPloidy,
                                  final int numAlleles,
                                  final double[] log10AlleleFrequencyPriors) {

    // Combine each pool incrementally - likelihoods will be renormalized at each step

    // first element: zero ploidy, e.g. trivial degenerate distribution
    final int numAltAlleles = numAlleles - 1;
    final int[] zeroCounts = new int[numAlleles];
    final ExactACset set = new ExactACset(1, new ExactACcounts(zeroCounts));
    set.getLog10Likelihoods()[0] = 0.0;
    final StateTracker stateTracker = getStateTracker(false, numAltAlleles);
    int combinedPloidy = 0;
    CombinedPoolLikelihoods combinedPoolLikelihoods = new CombinedPoolLikelihoods();
    combinedPoolLikelihoods.add(set);

    for (final Genotype genotype : GLs.iterateInSampleNameOrder()) {
        // recover gls and check if they qualify.
        if (!genotype.hasPL())
            continue;
        final double[] gls = genotype.getLikelihoods().getAsVector();
        if (MathUtils.sum(gls) >= GATKVariantContextUtils.SUM_GL_THRESH_NOCALL)
            continue;
        stateTracker.reset();
        final int declaredPloidy = genotype.getPloidy();
        final int ploidy = declaredPloidy < 1 ? defaultPloidy : declaredPloidy;
        // they do qualify so we proceed.
        combinedPoolLikelihoods = fastCombineMultiallelicPool(combinedPoolLikelihoods, gls,
                combinedPloidy, ploidy, numAlleles, log10AlleleFrequencyPriors, stateTracker);
        combinedPloidy = ploidy + combinedPloidy; // total number of chromosomes in combinedLikelihoods
    }
    if (combinedPloidy == 0)
        stateTracker.setLog10LikelihoodOfAFzero(0.0);
}
 
Developer: PAA-NCIC, Project: SparkSeq, Lines of code: 43, Source: GeneralPloidyExactAFCalculator.java

Example 7: combineSinglePools

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
/**
 * Simple non-optimized version that combines GLs from several pools and produces global AF distribution.
 * @param GLs                              Input genotypes context with per-pool GLs
 * @param defaultPloidy                    Ploidy assumed for genotypes that do not declare one
 * @param numAlleles                       Total number of alleles (reference included)
 * @param log10AlleleFrequencyPriors       Frequency priors
 */
protected void combineSinglePools(final GenotypesContext GLs,
                                  final int defaultPloidy,
                                  final int numAlleles,
                                  final double[] log10AlleleFrequencyPriors) {

    // Combine each pool incrementally - likelihoods will be renormalized at each step

    // first element: zero ploidy, e.g. trivial degenerate distribution
    final int numAltAlleles = numAlleles - 1;
    final int[] zeroCounts = new int[numAlleles];
    final ExactACset set = new ExactACset(1, new ExactACcounts(zeroCounts));
    set.getLog10Likelihoods()[0] = 0.0;
    final StateTracker stateTracker = getStateTracker(false,numAltAlleles);
    int combinedPloidy = 0;
    CombinedPoolLikelihoods combinedPoolLikelihoods = new CombinedPoolLikelihoods();
    combinedPoolLikelihoods.add(set);

    for (final Genotype genotype : GLs.iterateInSampleNameOrder()) {
        // recover gls and check if they qualify.
        if (!genotype.hasPL())
            continue;
        final double[] gls = genotype.getLikelihoods().getAsVector();
        if (MathUtils.sum(gls) >= GaeaGvcfVariantContextUtils.SUM_GL_THRESH_NOCALL)
            continue;
        stateTracker.reset();
        final int declaredPloidy = genotype.getPloidy();
        final int ploidy = declaredPloidy < 1 ? defaultPloidy : declaredPloidy;
        // they do qualify so we proceed.
        combinedPoolLikelihoods = fastCombineMultiallelicPool(combinedPoolLikelihoods, gls,
                combinedPloidy, ploidy, numAlleles, log10AlleleFrequencyPriors, stateTracker);
        combinedPloidy = ploidy + combinedPloidy; // total number of chromosomes in combinedLikelihoods
    }
    if (combinedPloidy == 0)
        stateTracker.setLog10LikelihoodOfAFzero(0.0);
}
 
Developer: BGI-flexlab, Project: SOAPgaea, Lines of code: 42, Source: GeneralPloidyExactAFCalculator.java

Example 8: genotypeCanBeMergedInCurrentBlock

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
private boolean genotypeCanBeMergedInCurrentBlock(final Genotype g) {
    return currentBlock != null && currentBlock.withinBounds(g.getGQ()) && currentBlock.getPloidy() == g.getPloidy()
            && (currentBlock.getMinPLs() == null || !g.hasPL() || (currentBlock.getMinPLs().length == g.getPL().length));
}
 
Developer: PAA-NCIC, Project: SparkSeq, Lines of code: 5, Source: GVCFWriter.java
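The predicate accepts a genotype only when its GQ falls inside the current block's bounds, its ploidy matches the block, and, if both sides carry PLs, the PL arrays have the same length. Below is a hedged standalone restatement of that check; the block state is replaced by explicit parameters, and the GQ bounds are assumed to be half-open, which may differ from the real HomRefBlock.withinBounds.

import htsjdk.variant.variantcontext.Genotype;

// Hypothetical standalone version of the predicate above; blockMinPLs, blockPloidy and
// the GQ bounds stand in for the currentBlock state.
static boolean canMergeIntoBlock(final Genotype g, final int[] blockMinPLs, final int blockPloidy,
                                 final int gqLowerBound, final int gqUpperBound) {
    final boolean gqWithinBounds = g.getGQ() >= gqLowerBound && g.getGQ() < gqUpperBound;
    final boolean samePloidy = g.getPloidy() == blockPloidy;
    final boolean compatiblePLs = blockMinPLs == null || !g.hasPL()
            || blockMinPLs.length == g.getPL().length;
    return gqWithinBounds && samePloidy && compatiblePLs;
}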

Example 9: mergeRefConfidenceGenotypes

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
/**
 * Merge into the context a new genotype represented by the given
 * VariantContext for the provided list of target alleles. This method
 * assumes that none of the alleles in the VC overlaps with any of the
 * alleles in the set.
 */
private static void mergeRefConfidenceGenotypes(final GenotypesContext mergedGenotypes, final VariantContext vc,
		final List<Allele> remappedAlleles, final List<Allele> targetAlleles, final boolean samplesAreUniquified,
		final boolean shouldComputePLs) {
	final int maximumPloidy = vc.getMaxPloidy(GaeaGvcfVariantContextUtils.DEFAULT_PLOIDY);
	// the map is different depending on the ploidy, so in order to keep
	// this method flexible (mixed ploidies)
	// we need to get a map done (lazily inside the loop) for each ploidy,
	// up to the maximum possible.
	final int[][] genotypeIndexMapsByPloidy = new int[maximumPloidy + 1][];
	final int maximumAlleleCount = Math.max(remappedAlleles.size(), targetAlleles.size());

	for (final Genotype g : vc.getGenotypes()) {
		final String name;
		if (samplesAreUniquified)
			name = g.getSampleName() + "." + vc.getSource();
		else
			name = g.getSampleName();
		final int ploidy = g.getPloidy();
		final GenotypeBuilder genotypeBuilder = new GenotypeBuilder(g)
				.alleles(GaeaGvcfVariantContextUtils.noCallAlleles(g.getPloidy())).noPL();
		genotypeBuilder.name(name);

		final boolean doPLs = shouldComputePLs && g.hasPL();
		final boolean hasAD = g.hasAD();
		final boolean hasSAC = g.hasExtendedAttribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY);
		if (doPLs || hasSAC || hasAD) {
			final int[] perSampleIndexesOfRelevantAlleles = getIndexesOfRelevantAlleles(remappedAlleles,
					targetAlleles, vc.getStart(), g);
			if (doPLs) {
				// lazy initialization of the genotype index map by ploidy.

				final int[] genotypeIndexMapByPloidy = genotypeIndexMapsByPloidy[ploidy] == null
						? GenotypeLikelihoodCalculators.getInstance(ploidy, maximumAlleleCount).genotypeIndexMap(
								perSampleIndexesOfRelevantAlleles)
						: genotypeIndexMapsByPloidy[ploidy];
				final int[] PLs = generatePL(g, genotypeIndexMapByPloidy);
				genotypeBuilder.PL(PLs);
			}
			if (hasAD) {
				genotypeBuilder.AD(generateAD(g.getAD(), perSampleIndexesOfRelevantAlleles));
			}
			if (hasSAC) {
				final List<Integer> sacIndexesToUse = adaptToSACIndexes(perSampleIndexesOfRelevantAlleles);
				final int[] SACs = GaeaGvcfVariantContextUtils.makeNewSACs(g, sacIndexesToUse);
				genotypeBuilder.attribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, SACs);
			}
		}
		mergedGenotypes.add(genotypeBuilder.make());
	}
}
 
Developer: BGI-flexlab, Project: SOAPgaea, Lines of code: 57, Source: ReferenceConfidenceVariantContextMerger.java

Example 10: getIndexesOfRelevantAlleles

import htsjdk.variant.variantcontext.Genotype; // import the class the method depends on
protected static int[] getIndexesOfRelevantAlleles(final List<Allele> remappedAlleles,
		final List<Allele> targetAlleles, final int position, final Genotype g) {

	if (remappedAlleles == null || remappedAlleles.isEmpty())
		throw new IllegalArgumentException("The list of input alleles must not be null or empty");
	if (targetAlleles == null || targetAlleles.isEmpty())
		throw new IllegalArgumentException("The list of target alleles must not be null or empty");

	if (!remappedAlleles.contains(GaeaVCFConstants.NON_REF_SYMBOLIC_ALLELE))
		throw new UserException("The list of input alleles must contain " + GaeaVCFConstants.NON_REF_SYMBOLIC_ALLELE
				+ " as an allele but that is not the case at position " + position
				+ "; please use the Haplotype Caller with gVCF output to generate appropriate records");

	final int indexOfNonRef = remappedAlleles.indexOf(GaeaVCFConstants.NON_REF_SYMBOLIC_ALLELE);
	final int[] indexMapping = new int[targetAlleles.size()];

	// the reference likelihoods should always map to each other (even if
	// the alleles don't)
	indexMapping[0] = 0;

	// create the index mapping, using the <NON-REF> allele whenever such a
	// mapping doesn't exist
	for (int i = 1; i < targetAlleles.size(); i++) {
		final Allele targetAllele = targetAlleles.get(i);

		// if there's more than one DEL allele we need to use the best one
		if (targetAllele == Allele.SPAN_DEL && g.hasPL()) {
			final int occurrences = Collections.frequency(remappedAlleles, Allele.SPAN_DEL);
			if (occurrences > 1) {
				final int indexOfBestDel = indexOfBestDel(remappedAlleles, g.getPL(), g.getPloidy());
				indexMapping[i] = (indexOfBestDel == -1 ? indexOfNonRef : indexOfBestDel);
				continue;
			}
		}

		final int indexOfRemappedAllele = remappedAlleles.indexOf(targetAllele);
		indexMapping[i] = indexOfRemappedAllele == -1 ? indexOfNonRef : indexOfRemappedAllele;
	}

	return indexMapping;
}
 
Developer: BGI-flexlab, Project: SOAPgaea, Lines of code: 43, Source: ReferenceConfidenceVariantContextMerger.java
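Setting the SPAN_DEL special case aside, the essential mapping rule is: each target allele is looked up in the remapped allele list, and anything not found there falls back to the index of the <NON_REF> allele, whose likelihood acts as a stand-in for unseen alleles. A reduced sketch of that rule with plain strings in place of htsjdk Allele objects (names are hypothetical):

import java.util.List;

// Hypothetical reduced form of the index mapping above, using strings instead of Alleles.
static int[] buildIndexMapping(final List<String> remapped, final List<String> target,
                               final String nonRefSymbol) {
    final int indexOfNonRef = remapped.indexOf(nonRefSymbol);
    final int[] mapping = new int[target.size()];
    mapping[0] = 0;                                    // reference always maps to reference
    for (int i = 1; i < target.size(); i++) {
        final int idx = remapped.indexOf(target.get(i));
        mapping[i] = (idx == -1) ? indexOfNonRef : idx; // fall back to <NON_REF>
    }
    return mapping;
}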


Note: The htsjdk.variant.variantcontext.Genotype.hasPL method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their original authors; copyright remains with those authors, and any further use or distribution should follow the corresponding project's license. Please do not reproduce without permission.