This article collects typical usage examples of the Java method htsjdk.variant.variantcontext.GenotypeBuilder.attribute. If you are wondering what GenotypeBuilder.attribute does, how to call it, or where to find real-world uses of it, the hand-picked method examples below should help. You can also explore the enclosing class, htsjdk.variant.variantcontext.GenotypeBuilder, for further details.
The following presents 15 code examples of GenotypeBuilder.attribute, sorted by popularity by default.
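Before the examples, here is a minimal, self-contained sketch of the core call. The sample name, alleles, values, and the FORMAT key "MY_COUNT" are made up for illustration; any custom key like this must also be declared in the VCF header before the genotype is written to a file.

import java.util.Arrays;

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;

public class GenotypeBuilderAttributeDemo {
    public static void main(String[] args) {
        final Allele ref = Allele.create("A", true);   // reference allele
        final Allele alt = Allele.create("T", false);  // alternate allele

        // attribute(key, value) attaches an extended (non-standard) FORMAT field to the genotype.
        final Genotype g = new GenotypeBuilder("sample1", Arrays.asList(ref, alt))
                .DP(30)
                .GQ(99)
                .attribute("MY_COUNT", 42)   // hypothetical custom FORMAT key
                .make();

        System.out.println(g.getExtendedAttribute("MY_COUNT")); // prints 42
    }
}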
Example 1: blockToVCF

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

/**
 * Convert a HomRefBlock into a VariantContext
 *
 * @param block the block to convert
 * @return a VariantContext representing the gVCF encoding for this block.
 * It will return {@code null} if input {@code block} is {@code null}, indicating that there
 * is no variant context to be output into the VCF.
 */
private VariantContext blockToVCF(final HomRefBlock block) {
    if ( block == null ) return null;

    final VariantContextBuilder vcb = new VariantContextBuilder(block.getStartingVC());
    vcb.attributes(new HashMap<String, Object>(2)); // clear the attributes
    vcb.stop(block.getStop());
    vcb.attribute(VCFConstants.END_KEY, block.getStop());

    // create the single Genotype with GQ and DP annotations
    final GenotypeBuilder gb = new GenotypeBuilder(sampleName, GATKVariantContextUtils.homozygousAlleleList(block.getRef(), block.getPloidy()));
    gb.noAD().noPL().noAttributes(); // clear all attributes
    gb.GQ(block.getMedianGQ());
    gb.DP(block.getMedianDP());
    gb.attribute(MIN_DP_FORMAT_FIELD, block.getMinDP());
    gb.PL(block.getMinPLs());

    // This annotation is no longer standard
    //gb.attribute(MIN_GQ_FORMAT_FIELD, block.getMinGQ());

    return vcb.genotypes(gb.make()).make();
}
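As a complement to Example 1, the sketch below assembles a similar gVCF-style reference-block record using plain htsjdk calls: the END INFO key marks where the band stops and a MIN_DP FORMAT attribute records the lowest depth inside it. The contig, coordinates, PLs, and depth values are invented for illustration; this is not the GATK implementation.

import java.util.Arrays;
import java.util.List;

import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.vcf.VCFConstants;

public class HomRefBlockSketch {
    public static VariantContext makeRefBlock() {
        final Allele ref = Allele.create("A", true);
        final Allele nonRef = Allele.create("<NON_REF>", false); // symbolic gVCF allele
        final List<Allele> siteAlleles = Arrays.asList(ref, nonRef);

        // Homozygous-reference genotype spanning the block, with representative DP/GQ/PL
        // values and the block's minimum depth as a custom FORMAT attribute.
        final Genotype g = new GenotypeBuilder("sample1", Arrays.asList(ref, ref))
                .DP(35)
                .GQ(60)
                .PL(new int[]{0, 60, 900})
                .attribute("MIN_DP", 22)
                .make();

        // One record covers positions 100-250; END in INFO marks where the block stops.
        return new VariantContextBuilder("sketch", "chr1", 100, 250, siteAlleles)
                .attribute(VCFConstants.END_KEY, 250)
                .genotypes(g)
                .make();
    }
}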
Example 2: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

@Override
public void annotate(final RefMetaDataTracker tracker,
                     final AnnotatorCompatible walker,
                     final ReferenceContext ref,
                     final AlignmentContext stratifiedContext,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final PerReadAlleleLikelihoodMap alleleLikelihoodMap) {
    if ( ! isAppropriateInput(alleleLikelihoodMap, g) )
        return;

    final int[][] table = FisherStrand.getContingencyTable(Collections.singletonMap(g.getSampleName(), alleleLikelihoodMap), vc, 0);

    gb.attribute(STRAND_BIAS_BY_SAMPLE_KEY_NAME, FisherStrand.getContingencyArray(table));
}
Example 3: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

public void annotate(final RefMetaDataTracker tracker,
                     final AnnotatorCompatible walker,
                     final ReferenceContext ref,
                     final AlignmentContext stratifiedContext,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final PerReadAlleleLikelihoodMap alleleLikelihoodMap) {
    if ( g == null || !g.isCalled() || stratifiedContext == null )
        return;

    int mq0 = 0;
    final ReadBackedPileup pileup = stratifiedContext.getBasePileup();
    for ( PileupElement p : pileup ) {
        if ( p.getMappingQual() == 0 )
            mq0++;
    }
    gb.attribute(getKeyNames().get(0), mq0);
}
Example 4: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

public void annotate(final VariantDataTracker tracker,
                     final ChromosomeInformationShare ref,
                     final Pileup pileup,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final PerReadAlleleLikelihoodMap alleleLikelihoodMap) {
    if ( g == null || !g.isCalled() || pileup == null )
        return;

    int mq0 = 0;
    for ( PileupReadInfo p : pileup.getTotalPileup() ) {
        if ( p.getMappingQuality() == 0 )
            mq0++;
    }
    gb.attribute(getKeyNames().get(0), mq0);
}
Example 5: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

public void annotate(final VariantDataTracker tracker,
                     final ChromosomeInformationShare ref,
                     final Pileup pileup,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final PerReadAlleleLikelihoodMap alleleLikelihoodMap) {
    if ( pileup == null )
        return;

    Double ratio = annotateSNP(pileup, vc, g);
    if ( ratio == null )
        return;

    gb.attribute(getKeyNames().get(0), Double.valueOf(String.format("%.2f", ratio.doubleValue())));
}
Example 6: subsetGenotypeAllelesWithLikelihoods

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

/**
 * From a given genotype, subset the PLs and SACs
 * @param g genotype to subset
 * @param allelesToUse alleles to subset
 * @param vc variant context with alleles and genotypes
 * @param ploidy number of chromosomes
 * @param assignGenotypes true: assign hard genotypes, false: leave as no-call
 * @param newLikelihoods the PL values
 * @return genotype with the subsetted PLs and SACs
 */
private Genotype subsetGenotypeAllelesWithLikelihoods(final Genotype g, final List<Allele> allelesToUse, final VariantContext vc, int ploidy,
                                                      final boolean assignGenotypes, final double[] newLikelihoods) {
    final GenotypeBuilder gb = new GenotypeBuilder(g);
    final String sampleName = g.getSampleName();

    // add likelihoods
    gb.PL(newLikelihoods);

    // get and add subsetted SACs
    final int[] newSACs = subsetSACAlleles(g, allelesToUse, vc);
    if (newSACs != null)
        gb.attribute(GaeaVCFConstants.STRAND_COUNT_BY_SAMPLE_KEY, newSACs);

    if (assignGenotypes)
        assignGenotype(gb, vc, sampleName, newLikelihoods, allelesToUse, ploidy);
    else
        gb.alleles(GaeaGvcfVariantContextUtils.noCallAlleles(ploidy));

    return gb.make();
}
Example 7: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

/**
 * Calculate annotations for each allele based on the given VariantContext and likelihoods for a given genotype's sample
 * and add the annotations to the GenotypeBuilder. See parent class docs in {@link GenotypeAnnotation}.
 */
public void annotate(final ReferenceContext ref,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final ReadLikelihoods<Allele> likelihoods) {
    Utils.nonNull(gb);
    Utils.nonNull(vc);
    if ( g == null || likelihoods == null ) {
        return;
    }

    final Map<Allele, List<Integer>> values = likelihoods.alleles().stream()
            .collect(Collectors.toMap(a -> a, a -> new ArrayList<>()));

    Utils.stream(likelihoods.bestAlleles(g.getSampleName()))
            .filter(ba -> ba.isInformative() && isUsableRead(ba.read))
            .forEach(ba -> getValueForRead(ba.read, vc).ifPresent(v -> values.get(ba.allele).add(v)));

    final int[] statistics = vc.getAlleles().stream().mapToInt(a -> aggregate(values.get(a))).toArray();
    gb.attribute(getVcfKey(), statistics);
}
Example 8: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

@Override
public void annotate(final ReferenceContext ref,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final ReadLikelihoods<Allele> likelihoods) {
    Utils.nonNull(vc);
    Utils.nonNull(g);
    Utils.nonNull(gb);

    if ( likelihoods == null || !g.isCalled() ) {
        logger.warn("Annotation will not be calculated, genotype is not called or alleleLikelihoodMap is null");
        return;
    }

    final int[][] table = FisherStrand.getContingencyTable(likelihoods, vc, 0, Arrays.asList(g.getSampleName()));
    gb.attribute(GATKVCFConstants.STRAND_BIAS_BY_SAMPLE_KEY, getContingencyArray(table));
}
Example 9: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

@Override
public void annotate(final ReferenceContext ref,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final ReadLikelihoods<Allele> likelihoods) {
    if (g.isHomRef()) {
        // skip the normal sample
        return;
    }

    final Allele altAllele = vc.getAlternateAllele(0); // assume single-allelic
    final String tumorSampleName = g.getSampleName();
    Collection<ReadLikelihoods<Allele>.BestAllele> tumorBestAlleles = likelihoods.bestAlleles(tumorSampleName);

    // Build a map from the (Start Position, Fragment Size) tuple to the count of reads with that
    // start position and fragment size
    Map<ImmutablePair<Integer, Integer>, Long> duplicateReadMap = tumorBestAlleles.stream()
            .filter(ba -> ba.allele.equals(altAllele) && ba.isInformative())
            .map(ba -> new ImmutablePair<>(ba.read.getStart(), ba.read.getFragmentLength()))
            .collect(Collectors.groupingBy(x -> x, Collectors.counting()));

    gb.attribute(UNIQUE_ALT_READ_SET_COUNT_KEY, duplicateReadMap.size());
}
Example 10: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

/**
 * Calculate annotations for each allele based on the given VariantContext and likelihoods for a given genotype's sample
 * and add the annotations to the GenotypeBuilder. By default annotations are only calculated for alt alleles, but
 * implementations may override the {@code includeRefAllele()} method. See parent class docs in {@link GenotypeAnnotation}.
 */
public void annotate(final ReferenceContext ref,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final ReadLikelihoods<Allele> likelihoods) {
    Utils.nonNull(gb);
    Utils.nonNull(vc);
    if ( g == null || likelihoods == null ) {
        return;
    }

    final Map<Allele, List<Integer>> values = likelihoods.alleles().stream()
            .collect(Collectors.toMap(a -> a, a -> new ArrayList<>()));

    Utils.stream(likelihoods.bestAlleles(g.getSampleName()))
            .filter(ba -> ba.isInformative() && isUsableRead(ba.read))
            .forEach(ba -> getValueForRead(ba.read, vc).ifPresent(v -> values.get(ba.allele).add(v)));

    final int[] statistics = vc.getAlleles().stream().filter(this::includeAllele).mapToInt(a -> aggregate(values.get(a))).toArray();
    gb.attribute(getVcfKey(), statistics);
}
Example 11: composeNonTruthOverlappingGenotype

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

private Genotype composeNonTruthOverlappingGenotype(final VariantContext enclosingContext, final Genotype genotype) {
    final GenotypeBuilder builder = new GenotypeBuilder(genotype.getSampleName());
    if (genotype.isCalled()) {
        GATKProtectedVariantContextUtils.setGenotypeQualityFromPLs(builder, genotype);
        final int[] PL = genotype.getPL();
        final int callAlleleIndex = GATKProtectedMathUtils.minIndex(PL);
        final double quality = callQuality(genotype);
        builder.alleles(Collections.singletonList(enclosingContext.getAlleles().get(callAlleleIndex)));
        builder.attribute(VariantEvaluationContext.CALL_QUALITY_KEY, quality);
        final boolean discovered = XHMMSegmentGenotyper.DISCOVERY_TRUE.equals(
                GATKProtectedVariantContextUtils.getAttributeAsString(genotype, XHMMSegmentGenotyper.DISCOVERY_KEY,
                        XHMMSegmentGenotyper.DISCOVERY_FALSE));
        if (callAlleleIndex != 0 && discovered) {
            builder.attribute(VariantEvaluationContext.EVALUATION_CLASS_KEY, EvaluationClass.UNKNOWN_POSITIVE.acronym);
        }
        if (quality < filterArguments.minimumCalledSegmentQuality) {
            builder.filter(EvaluationFilter.LowQuality.acronym);
        } else {
            builder.filter(EvaluationFilter.PASS);
        }
    } else { /* assume it is REF */
        /* TODO this is a hack to make Andrey's CODEX vcf work; and in general, VCFs that only include discovered
         * variants and NO_CALL (".") on other samples. The idea is to force the evaluation tool to take its call
         * as REF on all other samples. Otherwise, the effective allele frequency of the variant will be erroneously
         * high and will be filtered. */
        builder.alleles(Collections.singletonList(CopyNumberTriStateAllele.REF));
        builder.attribute(VariantEvaluationContext.CALL_QUALITY_KEY, 100000);
        builder.filter(EvaluationFilter.PASS);
    }
    return builder.make();
}
Example 12: createHomRefGenotype

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

private Genotype createHomRefGenotype(String sampleName) {
    final GenotypeBuilder gb = new GenotypeBuilder(sampleName, Collections.nCopies(getPloidy(), getRef()));
    gb.noAD().noPL().noAttributes(); // clear all attributes

    final int[] minPLs = getMinPLs();
    gb.PL(minPLs);
    gb.GQ(GATKVariantContextUtils.calculateGQFromPLs(minPLs));
    gb.DP(getMedianDP());
    gb.attribute(GATKVCFConstants.MIN_DP_FORMAT_KEY, getMinDP());

    return gb.make();
}
Example 13: annotate

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

@Override
public void annotate(final ReferenceContext refContext,
                     final VariantContext vc,
                     final Genotype g,
                     final GenotypeBuilder gb,
                     final ReadLikelihoods<Allele> likelihoods) {
    Utils.nonNull(gb, "gb is null");
    Utils.nonNull(vc, "vc is null");
    if (g == null || likelihoods == null) {
        return;
    }

    final Map<Allele, MutableInt> f1r2Counts = likelihoods.alleles().stream()
            .collect(Collectors.toMap(a -> a, a -> new MutableInt(0)));
    final Map<Allele, MutableInt> f2r1Counts = likelihoods.alleles().stream()
            .collect(Collectors.toMap(a -> a, a -> new MutableInt(0)));

    Utils.stream(likelihoods.bestAlleles(g.getSampleName()))
            .filter(ba -> ba.isInformative() && isUsableRead(ba.read))
            .forEach(ba -> (isF2R1(ba.read) ? f2r1Counts : f1r2Counts).get(ba.allele).increment());

    final int[] f1r2 = vc.getAlleles().stream().mapToInt(a -> f1r2Counts.get(a).intValue()).toArray();
    final int[] f2r1 = vc.getAlleles().stream().mapToInt(a -> f2r1Counts.get(a).intValue()).toArray();

    gb.attribute(GATKVCFConstants.F1R2_KEY, f1r2);
    gb.attribute(GATKVCFConstants.F2R1_KEY, f2r1);
}
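Examples 13 and 14 both attach custom per-allele FORMAT arrays (F1R2/F2R1) to the genotype. A practical note: a value set with gb.attribute(...) only survives the trip to disk if a matching FORMAT line exists in the VCF header, since htsjdk's writer typically rejects undeclared genotype keys unless Options.ALLOW_MISSING_FIELDS_IN_HEADER is enabled. The sketch below shows what such header declarations might look like; the descriptions are placeholders, not GATK's exact text.

import htsjdk.variant.vcf.VCFFormatHeaderLine;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLineCount;
import htsjdk.variant.vcf.VCFHeaderLineType;

public class FormatHeaderSketch {
    public static VCFHeader addPairOrientationLines(final VCFHeader header) {
        // Count 'R' means one integer per allele (REF plus each ALT), matching the arrays built above.
        header.addMetaDataLine(new VCFFormatHeaderLine(
                "F1R2", VCFHeaderLineCount.R, VCFHeaderLineType.Integer,
                "Count of reads in F1R2 pair orientation supporting each allele"));
        header.addMetaDataLine(new VCFFormatHeaderLine(
                "F2R1", VCFHeaderLineCount.R, VCFHeaderLineType.Integer,
                "Count of reads in F2R1 pair orientation supporting each allele"));
        return header;
    }
}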
Example 14: annotateSingleVariant

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

/**
 * Annotate the given variant context with the OxoG read count attributes, directly from the read pileup.
 *
 * This method may be slow and should be considered EXPERIMENTAL, especially with regard to indels and complex/mixed
 * variants.
 *
 * @param vc variant context for the genotype. Necessary so that we can see all alleles.
 * @param gb genotype builder to put the annotations into.
 * @param readPileup pileup of the reads at this vc. Note that this pileup does not have to match the
 *                   genotype. In other words, this tool does not check that the pileup was generated from the
 *                   genotype sample.
 */
public static void annotateSingleVariant(final VariantContext vc, final GenotypeBuilder gb,
                                         final ReadPileup readPileup, int meanBaseQualityCutoff) {
    Utils.nonNull(gb, "gb is null");
    Utils.nonNull(vc, "vc is null");

    // Create a list of unique alleles
    final List<Allele> variantAllelesWithDupes = vc.getAlleles();
    final Set<Allele> alleleSet = new LinkedHashSet<>(variantAllelesWithDupes);
    final List<Allele> variantAlleles = new ArrayList<>(alleleSet);

    // Initialize the mappings
    final Map<Allele, MutableInt> f1r2Counts = variantAlleles.stream()
            .collect(Collectors.toMap(Function.identity(), a -> new MutableInt(0)));
    final Map<Allele, MutableInt> f2r1Counts = variantAlleles.stream()
            .collect(Collectors.toMap(Function.identity(), a -> new MutableInt(0)));

    final List<Allele> referenceAlleles = variantAlleles.stream().filter(a -> a.isReference() && !a.isSymbolic()).collect(Collectors.toList());
    final List<Allele> altAlleles = variantAlleles.stream().filter(a -> a.isNonReference() && !a.isSymbolic()).collect(Collectors.toList());

    if (referenceAlleles.size() != 1) {
        logger.warn("Number of reference alleles does not equal 1 for VC: " + vc);
    }

    // We MUST have exactly 1 non-symbolic reference allele and a read pileup
    if ((referenceAlleles.size() == 1) && (readPileup != null) && !referenceAlleles.get(0).isSymbolic()) {
        final Allele referenceAllele = referenceAlleles.get(0);
        Utils.stream(readPileup)
                .filter(pe -> isUsableRead(pe.getRead()))
                .forEach(pe -> incrementCounts(pe, f1r2Counts, f2r1Counts, referenceAllele, altAlleles, meanBaseQualityCutoff));
    }

    final int[] f1r2 = variantAlleles.stream().mapToInt(a -> f1r2Counts.get(a).intValue()).toArray();
    final int[] f2r1 = variantAlleles.stream().mapToInt(a -> f2r1Counts.get(a).intValue()).toArray();

    gb.attribute(GATKVCFConstants.F1R2_KEY, f1r2);
    gb.attribute(GATKVCFConstants.F2R1_KEY, f2r1);
}
Example 15: makeGenotype

import htsjdk.variant.variantcontext.GenotypeBuilder; // import the package/class this method depends on

public Genotype makeGenotype() {
    GenotypeBuilder gb = new GenotypeBuilder(this.g);
    String fmt = getReview();
    if (fmt == null) fmt = "";
    fmt = fmt.trim().replaceAll("[ \n\t\\:_]+", "_");
    gb.attribute(IgvReview.this.reviewFormat.getID(), fmt);
    return gb.make();
}