

Java ValidationStringency Class Code Examples

This article collects typical usage examples of the Java class htsjdk.samtools.ValidationStringency. If you are wondering what exactly the ValidationStringency class does, how to use it, or where to find usage examples, the curated code samples below may help.


The ValidationStringency class belongs to the htsjdk.samtools package. Fifteen code examples of the class are shown below, sorted by popularity by default.
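For orientation before the examples: ValidationStringency is an enum whose values are STRICT, LENIENT, and SILENT, and it is most often passed to a SamReaderFactory to control how malformed SAM/BAM records are treated. The following snippet is a minimal sketch written for this article (the path example.bam is a placeholder), not code taken from any of the projects cited below.

import java.io.File;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;

public class ValidationStringencyDemo {
    public static void main(String[] args) throws Exception {
        // SILENT ignores validation problems, LENIENT logs them, STRICT throws an exception.
        SamReader reader = SamReaderFactory.makeDefault()
                .validationStringency(ValidationStringency.SILENT)
                .open(new File("example.bam")); // placeholder path
        try {
            for (SAMRecord record : reader) {
                // Records are returned even if some fields would fail strict validation.
                System.out.println(record.getReadName());
            }
        } finally {
            reader.close();
        }
    }
}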

Example 1: main

import htsjdk.samtools.ValidationStringency; // import the required package/class
public static void main(String args[]) throws IOException, ParseException {
    Options options = new Options();
    options.addOption("u", "uniquehits", false, "only output hits with a single mapping");
    options.addOption("s", "nosuboptimal", false, "do not include hits whose score is not equal to the best score for the read");
    options.addOption("p", "pairedend", false, "output paired-end hits");
    options.addOption("j", "junctions", false, "output junction mapping reads (reads with gaps)");
    CommandLineParser parser = new GnuParser();
    CommandLine cl = parser.parse(options, args, false);
    uniqueOnly = cl.hasOption("uniquehits");
    filterSubOpt = cl.hasOption("nosuboptimal");
    inclPairedEnd = cl.hasOption("pairedend");
    inclJunction = cl.hasOption("junctions");
    SamReaderFactory factory =
            SamReaderFactory.makeDefault()
                    .enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, SamReaderFactory.Option.VALIDATE_CRC_CHECKSUMS)
                    .validationStringency(ValidationStringency.SILENT);
    SamReader reader = factory.open(SamInputResource.of(System.in));
    CloseableIterator<SAMRecord> iter = reader.iterator();
    while (iter.hasNext()) {
        SAMRecord record = iter.next();
        if (record.getReadUnmappedFlag()) { continue; }
        processRecord(record);
    }
    iter.close();
    reader.close();
}
 
Developer ID: seqcode, Project: seqcode-core, Lines of code: 27, Source file: TophatSAMToReadDB.java

Example 2: main

import htsjdk.samtools.ValidationStringency; // import the required package/class
public static void main(String[] args) throws IOException {
	File bamFile = new File(
			"./data/miniCaviar_IDT_NEB.runA.NA12878.bwa.bam");
	File newBamFile = new File(
			"./data/miniCaviar_IDT_NEB.runA.NA12878.bwa.chrom1.bam");
	SAMFileWriter outputSam = null;
	final SamReader reader = SamReaderFactory.makeDefault()
			.validationStringency(ValidationStringency.SILENT)
			.open(bamFile);

	outputSam = new SAMFileWriterFactory().makeBAMWriter(
			reader.getFileHeader(), true, newBamFile);

	int currentReads = 0;
	for (final SAMRecord samRecord : reader) {
		if (samRecord.getReferenceIndex() == 0)
			writeBam(samRecord, outputSam);

	}

	reader.close();
	outputSam.close();
}
 
Developer ID: acs6610987, Project: secram, Lines of code: 24, Source file: DownSamplingBAM.java

Example 3: main

import htsjdk.samtools.ValidationStringency; // import the required package/class
public static void main(String[] args) {
	String bam = "./data/HG00115.chrom11.ILLUMINA.bwa.GBR.exome.20130415.bam";
	SamReader reader = SamReaderFactory.makeDefault()
			.validationStringency(ValidationStringency.SILENT)
			.open(new File(bam));
	int length = 0;
	for (final SAMRecord record : reader) {
		if (record.getReadUnmappedFlag())
			continue;
		length += record.getReadLength();
	}

	ReferenceSequenceFile rsf = ReferenceSequenceFileFactory
			.getReferenceSequenceFile(new File("data/hs37d5.fa"));
	ReferenceSequence rs = rsf.getSequence("11");

	System.out.println(length * 1.0 / rs.length());
}
 
Developer ID: acs6610987, Project: secram, Lines of code: 19, Source file: BAMAvgCoverage.java

Example 4: main

import htsjdk.samtools.ValidationStringency; // import the required package/class
public static void main(String[] args) throws IOException {
	SamReader reader = SamReaderFactory
			.makeDefault()
			.validationStringency(ValidationStringency.SILENT)
			.open(new File(
					"./data/miniCaviar_IDT_NEB.runA.NA12878.bwa.chrom1.bam"));

	SAMFileHeader samFileHeader = reader.getFileHeader();

	System.out.println(samFileHeader.getTextHeader());

	int i = 0;
	for (SAMRecord record : reader) {
		if (i < 100)
			System.out.println(record.getAlignmentStart());
		i++;
	}
	reader.close();
}
 
Developer ID: acs6610987, Project: secram, Lines of code: 20, Source file: CheckSamHeader.java

Example 5: setSamSeqDictFromBam

import htsjdk.samtools.ValidationStringency; // import the required package/class
private boolean setSamSeqDictFromBam(String bamfile) {

		/*  ------------------------------------------------------ */
		/* This chunk prepares SamReader from local bam            */
		SamReaderFactory srf=SamReaderFactory.make();
		srf.validationStringency(ValidationStringency.SILENT);
		SamReader samReader;
		samReader= srf.open(new File(bamfile));
		/*  ------------------------------------------------------ */
		
		SAMSequenceDictionary seqDict = samReader.getFileHeader().getSequenceDictionary();
		if(seqDict != null && !seqDict.isEmpty()){
			this.setSamSeqDictSource(new File(bamfile).getAbsolutePath());
			this.setSamSeqDict(seqDict);
			return true;
		}
		return false;
	}
 
Developer ID: dariober, Project: ASCIIGenome, Lines of code: 19, Source file: GenomicCoords.java

Example 6: getAlignedReadCount

import htsjdk.samtools.ValidationStringency; // import the required package/class
public static long getAlignedReadCount(String bam) throws IOException{

		/*  ------------------------------------------------------ */
		/* This chunk prepares SamReader from local bam or URL bam */
		UrlValidator urlValidator = new UrlValidator();
		SamReaderFactory srf=SamReaderFactory.make();
		srf.validationStringency(ValidationStringency.SILENT);
		SamReader samReader;
		if(urlValidator.isValid(bam)){
			samReader = SamReaderFactory.makeDefault().open(
					SamInputResource.of(new URL(bam)).index(new URL(bam + ".bai"))
			);
		} else {
			samReader= srf.open(new File(bam));
		}
		/*  ------------------------------------------------------ */

		List<SAMSequenceRecord> sequences = samReader.getFileHeader().getSequenceDictionary().getSequences();
		long alnCount= 0;
		for(SAMSequenceRecord x : sequences){
			alnCount += samReader.indexing().getIndex().getMetaData(x.getSequenceIndex()).getAlignedRecordCount();
		}
		samReader.close();
		return alnCount;
    }
 
Developer ID: dariober, Project: ASCIIGenome, Lines of code: 26, Source file: Utils.java

Example 7: bamHasIndex

import htsjdk.samtools.ValidationStringency; // import the required package/class
public static boolean bamHasIndex(String bam) throws IOException{

		/*  ------------------------------------------------------ */
		/* This chunk prepares SamReader from local bam or URL bam */
		UrlValidator urlValidator = new UrlValidator();
		SamReaderFactory srf=SamReaderFactory.make();
		srf.validationStringency(ValidationStringency.SILENT);
		SamReader samReader;
		if(urlValidator.isValid(bam)){
			samReader = SamReaderFactory.makeDefault().open(
					SamInputResource.of(new URL(bam)).index(new URL(bam + ".bai"))
			);
		} else {
			samReader= srf.open(new File(bam));
		}
		/*  ------------------------------------------------------ */

		// SamReaderFactory srf=SamReaderFactory.make();
		// srf.validationStringency(ValidationStringency.SILENT);
		// SamReader samReader = srf.open(new File(bam));
		boolean hasIndex= samReader.hasIndex();
		samReader.close();
		return hasIndex;
		
	}
 
Developer ID: dariober, Project: ASCIIGenome, Lines of code: 26, Source file: Utils.java

Example 8: getSAMFileReader

import htsjdk.samtools.ValidationStringency; // import the required package/class
private SAMFileReader getSAMFileReader(String samFile, long startPosition) {
    try {
        SeekableStream stream = IGVSeekableStreamFactory.getInstance().getStreamFor(samFile);
        if (startPosition >= 0) {
            stream.seek(startPosition);
        }
        SAMFileReader reader = new SAMFileReader(stream);
        reader.setValidationStringency(ValidationStringency.SILENT);

        //Need to keep the file source, if loading lazily
        //TODO Can't reload from SAM files. See SAMTextReader.getIterator
        //reader.enableFileSource(PicardAlignment.DEFAULT_LAZY_LOAD);

        return reader;
    } catch (IOException ex) {
        log.error("Error opening sam file", ex);
        throw new RuntimeException("Error opening: " + samFile, ex);
    }
}
 
Developer ID: hyounesy, Project: ALEA, Lines of code: 20, Source file: SAMReader.java
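A note on Example 8: SAMFileReader and setValidationStringency belong to htsjdk's older, now-deprecated API. As a rough sketch of the equivalent using the current SamReaderFactory API (the helper name openSilentReader and the assumption that the stream is already positioned are illustrative, not taken from the ALEA/IGV source):

import htsjdk.samtools.SamInputResource;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;
import htsjdk.samtools.seekablestream.SeekableStream;

// Minimal sketch: open a SamReader over an already-positioned stream with SILENT validation.
static SamReader openSilentReader(SeekableStream stream) {
    return SamReaderFactory.makeDefault()
            .validationStringency(ValidationStringency.SILENT)
            .open(SamInputResource.of(stream));
}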

Example 9: getReadPCollection

import htsjdk.samtools.ValidationStringency; // import the required package/class
/**
 * Create a {@link PCollection<GATKRead>} containing all the reads overlapping the given intervals.
 * Reads that are unmapped are ignored.
 * @param intervals a list of SimpleIntervals.  These must be non-overlapping intervals or the results are undefined.
 * @param stringency how to react to malformed reads.
 * @param includeUnmappedReads whether to include unmapped reads.
 * @return a PCollection containing all the reads that overlap the given intervals.
 */
public PCollection<GATKRead> getReadPCollection(List<SimpleInterval> intervals, ValidationStringency stringency, boolean includeUnmappedReads) {
    PCollection<GATKRead> preads;
    if(cloudStorageUrl){
        Iterable<Contig> contigs = intervals.stream()
                .map(i -> new Contig(i.getContig(), i.getStart(), i.getEnd()))
                .collect(Collectors.toList());
        try {
            PCollection<Read> rawReads = ReadBAMTransform.getReadsFromBAMFilesSharded(pipeline, auth,contigs, new ReaderOptions(stringency, includeUnmappedReads), bam, ShardingPolicy.LOCI_SIZE_POLICY);
            preads = rawReads.apply(new GoogleGenomicsReadToGATKRead());
        } catch (IOException ex) {
            throw new UserException.CouldNotReadInputFile("Unable to read "+bam, ex);
        }
    } else if (hadoopUrl) {
        preads = DataflowUtils.getReadsFromHadoopBam(pipeline, intervals, stringency, bam);
    } else {
        preads = DataflowUtils.getReadsFromLocalBams(pipeline, intervals, stringency, ImmutableList.of(new File(bam)));
    }
    return preads;
}
 
Developer ID: broadinstitute, Project: gatk-dataflow, Lines of code: 28, Source file: ReadsDataflowSource.java

Example 10: setupPipeline

import htsjdk.samtools.ValidationStringency; // import the required package/class
@Override
protected void setupPipeline(Pipeline pipeline) {
    if (readArguments.getReadFilesNames().size()>1) {
        throw new UserException("Sorry, we only support a single input file for now.");
    }
    final String filename = readArguments.getReadFilesNames().get(0);
    final ReadsDataflowSource readsSource = new ReadsDataflowSource(filename, pipeline);
    final SAMFileHeader header = readsSource.getHeader();
    final PCollectionView<SAMFileHeader> headerView = pipeline.apply(Create.of(header)).apply(View.asSingleton());
    final SAMSequenceDictionary sequenceDictionary = header.getSequenceDictionary();
    final List<SimpleInterval> intervals = intervalArgumentCollection.intervalsSpecified() ? intervalArgumentCollection.getIntervals(sequenceDictionary)
            : IntervalUtils.getAllIntervalsForReference(sequenceDictionary);
    final PCollectionView<BaseRecalOutput> recalInfoSingletonView = BaseRecalOutputSource.loadFileOrRemote(pipeline, BQSR_RECAL_FILE_NAME).apply(View.asSingleton());
    final PCollection<GATKRead> output = readsSource.getReadPCollection(intervals, ValidationStringency.SILENT, false)
            .apply(new ApplyBQSRTransform(headerView, recalInfoSingletonView, bqsrOpts));
    intermediateRemoteBam = OUTPUT;
    if (needsIntermediateCopy()) {
        // The user specified remote execution and provided a local file name. So we're going to have to save to remote storage as a go-between.
        // Note that this may require more permissions
        intermediateRemoteBam = BucketUtils.randomRemotePath(stagingLocation, "temp-applyBqsr-output-", ".bam");
        logger.info("Staging results at " + intermediateRemoteBam);
    }
    SmallBamWriter.writeToFile(pipeline, output, header, intermediateRemoteBam);
}
 
Developer ID: broadinstitute, Project: gatk-dataflow, Lines of code: 25, Source file: ApplyBQSRDataflow.java

Example 11: testGetReadsFromHadoopBam

import htsjdk.samtools.ValidationStringency; // import the required package/class
@Test
public void testGetReadsFromHadoopBam() {
    String dataflowRunner = DataflowCommandLineProgramTest.getExternallySpecifiedRunner();
    if (!SparkPipelineRunner.class.getSimpleName().equals(dataflowRunner)) {
        return; // only run if SparkPipelineRunner specified
    }
    List<SimpleInterval> intervals = Arrays.asList(new SimpleInterval("chr7:1-202"), new SimpleInterval("chr8:2-202"));
    File inputFile = new File(getToolTestDataDir(), "example_reads.bam");
    List<GATKRead> expected = getReadsFromFile(intervals, inputFile, false);

    Pipeline p = GATKTestPipeline.create();
    DataflowUtils.registerGATKCoders(p);
    PCollection<GATKRead> reads = DataflowUtils.getReadsFromHadoopBam(p, intervals, ValidationStringency.SILENT,
            new Path(inputFile.getAbsoluteFile().toURI()).toString());
    EvaluationResult result = SparkPipelineRunner.create().run(p);

    Assert.assertTrue(ReadUtils.readListsAreEqualIgnoreUUID(expected, Lists.newArrayList(result.get(reads))), "Actual reads do not match expected reads");
}
 
Developer ID: broadinstitute, Project: gatk-dataflow, Lines of code: 19, Source file: DataflowUtilsUnitTest.java

Example 12: testNonStrictBAM

import htsjdk.samtools.ValidationStringency; // import the required package/class
@Test(expectedExceptions = UserException.class)
public void testNonStrictBAM() {
    final File normalOutputFile = createTempFile("normal-test",".txt");
    final File tumorOutputFile = createTempFile("tumor-test",".txt");

    final String[] arguments = {
            "-" + ExomeStandardArgumentDefinitions.NORMAL_BAM_FILE_SHORT_NAME, NON_STRICT_BAM_FILE.getAbsolutePath(),
            "-" + ExomeStandardArgumentDefinitions.TUMOR_BAM_FILE_SHORT_NAME, TUMOR_BAM_FILE.getAbsolutePath(),
            "-" + ExomeStandardArgumentDefinitions.SNP_FILE_SHORT_NAME, SNP_FILE.getAbsolutePath(),
            "-" + StandardArgumentDefinitions.REFERENCE_SHORT_NAME, REF_FILE.getAbsolutePath(),
            "-" + ExomeStandardArgumentDefinitions.NORMAL_ALLELIC_COUNTS_FILE_SHORT_NAME, normalOutputFile.getAbsolutePath(),
            "-" + ExomeStandardArgumentDefinitions.TUMOR_ALLELIC_COUNTS_FILE_SHORT_NAME, tumorOutputFile.getAbsolutePath(),
            "--VALIDATION_STRINGENCY", ValidationStringency.STRICT.toString()
    };
    runCommandLine(arguments);
    //should catch SAMFormatException and throw new UserException with --VALIDATION_STRINGENCY STRICT
}
 
Developer ID: broadinstitute, Project: gatk-protected, Lines of code: 18, Source file: GetHetCoverageIntegrationTest.java
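For context on what the test above relies on: under STRICT stringency, htsjdk throws a SAMFormatException when it decodes a malformed record, which tools typically rewrap as their own error type; under SILENT the problem is ignored. The fragment below is a minimal, hypothetical illustration (the method name readStrictly and the placeholder file are assumptions, not GATK code):

import java.io.File;
import java.io.IOException;
import htsjdk.samtools.SAMFormatException;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.ValidationStringency;

// Minimal sketch: iterating a malformed BAM under STRICT validation surfaces a SAMFormatException.
static void readStrictly(File malformedBam) throws IOException {
    try (SamReader reader = SamReaderFactory.makeDefault()
            .validationStringency(ValidationStringency.STRICT)
            .open(malformedBam)) {
        for (SAMRecord record : reader) {
            // Each record is fully validated before it reaches this point.
        }
    } catch (SAMFormatException e) {
        // Callers (like the tool exercised in Example 12) can rewrap this as a user-facing error.
        throw new IllegalStateException("Malformed record under STRICT validation", e);
    }
}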

Example 13: readLaneTiles

import htsjdk.samtools.ValidationStringency; // import the required package/class
/** Returns a partitioned collection of lane number to Tile objects from the provided basecall directory. */
public static Map<Integer, ? extends Collection<Tile>> readLaneTiles(final File illuminaRunDirectory,
                                                                     final ReadStructure readStructure,
                                                                     final ValidationStringency validationStringency,
                                                                     final boolean isNovaSeq) {
    final Collection<Tile> tiles;
    try {
        File tileMetricsOutFile = TileMetricsUtil.renderTileMetricsFileFromBasecallingDirectory(illuminaRunDirectory, isNovaSeq);
        if (isNovaSeq) {
            tiles = TileMetricsUtil.parseTileMetrics(
                    tileMetricsOutFile,
                    TileMetricsUtil.renderPhasingMetricsFilesFromBasecallingDirectory(illuminaRunDirectory),
                    readStructure,
                    validationStringency);
        } else {
            tiles = TileMetricsUtil.parseTileMetrics(tileMetricsOutFile,
                    readStructure,
                    validationStringency
            );
        }
    } catch (final FileNotFoundException e) {
        throw new PicardException("Unable to open laneMetrics file.", e);
    }

    return tiles.stream().filter(tile -> tile.getLaneNumber() > 0).collect(Collectors.groupingBy(Tile::getLaneNumber));
}
 
Developer ID: broadinstitute, Project: picard, Lines of code: 27, Source file: CollectIlluminaLaneMetrics.java

Example 14: standardReheader

import htsjdk.samtools.ValidationStringency; // import the required package/class
private void standardReheader(final SAMFileHeader replacementHeader) {
    final SamReader recordReader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).validationStringency(ValidationStringency.SILENT).open(INPUT);
    if (replacementHeader.getSortOrder() != recordReader.getFileHeader().getSortOrder()) {
        throw new PicardException("Sort orders of INPUT (" + recordReader.getFileHeader().getSortOrder().name() +
                ") and HEADER (" + replacementHeader.getSortOrder().name() + ") do not agree.");
    }
    final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(replacementHeader, true, OUTPUT);

    final ProgressLogger progress = new ProgressLogger(Log.getInstance(ReplaceSamHeader.class));
    for (final SAMRecord rec : recordReader) {
        rec.setHeader(replacementHeader);
        writer.addAlignment(rec);
        progress.record(rec);
    }
    writer.close();
    CloserUtil.close(recordReader);
}
 
Developer ID: broadinstitute, Project: picard, Lines of code: 18, Source file: ReplaceSamHeader.java

Example 15: sortInputs

import htsjdk.samtools.ValidationStringency; // import the required package/class
/**
 * Merge the inputs and sort them by adding each input's content to a single SortingCollection.
 * <p/>
 * NB: It would be better to have a merging iterator as in MergeSamFiles, as this would perform better for pre-sorted inputs.
 * Here, we are assuming inputs are unsorted, and so adding their VariantContexts iteratively is fine for now.
 * MergeVcfs exists for simple merging of presorted inputs.
 *
 * @param readers      - a list of VCFFileReaders, one for each input VCF
 * @param outputHeader - The merged header whose information we intend to use in the final output file
 */
private SortingCollection<VariantContext> sortInputs(final List<VCFFileReader> readers, final VCFHeader outputHeader) {
    final ProgressLogger readProgress = new ProgressLogger(log, 25000, "read", "records");

    // NB: The default MAX_RECORDS_IN_RAM may not be appropriate here. VariantContexts are smaller than SamRecords
    // We would have to play around empirically to find an appropriate value. We are not performing this optimization at this time.
    final SortingCollection<VariantContext> sorter =
            SortingCollection.newInstance(
                    VariantContext.class,
                    new VCFRecordCodec(outputHeader, VALIDATION_STRINGENCY != ValidationStringency.STRICT),
                    outputHeader.getVCFRecordComparator(),
                    MAX_RECORDS_IN_RAM,
                    TMP_DIR);
    int readerCount = 1;
    for (final VCFFileReader reader : readers) {
        log.info("Reading entries from input file " + readerCount);
        for (final VariantContext variantContext : reader) {
            sorter.add(variantContext);
            readProgress.record(variantContext.getContig(), variantContext.getStart());
        }
        reader.close();
        readerCount++;
    }
    return sorter;
}
 
Developer ID: broadinstitute, Project: picard, Lines of code: 35, Source file: SortVcf.java


Note: the htsjdk.samtools.ValidationStringency class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors, and the copyright of the source code belongs to those authors; please refer to each project's License before redistributing or reusing the code. Do not republish this article without permission.