

Java SAMTextHeaderCodec Class Code Examples

This article collects typical usage examples of the Java class htsjdk.samtools.SAMTextHeaderCodec. If you are wondering what SAMTextHeaderCodec is for and how to use it in practice, the curated examples below should help.


The SAMTextHeaderCodec class belongs to the htsjdk.samtools package. Fifteen code examples of the class are shown below, sorted by popularity by default.
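
Before the project examples, here is a minimal round-trip sketch (the class name and header text are invented for illustration; it assumes only that htsjdk is on the classpath): it decodes an in-memory SAM text header into a SAMFileHeader and then encodes it back to text, which is the basic pattern the examples below build on.

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMTextHeaderCodec;
import htsjdk.samtools.util.BufferedLineReader;

import java.io.StringWriter;

public class SAMTextHeaderCodecRoundTrip {
    public static void main(final String[] args) {
        // A minimal SAM text header: one @HD line and one @SQ line.
        final String headerText = "@HD\tVN:1.6\tSO:coordinate\n"
                + "@SQ\tSN:chr1\tLN:248956422\n";

        final SAMTextHeaderCodec codec = new SAMTextHeaderCodec();

        // decode(...) parses the text form into a SAMFileHeader; the second
        // argument is only a label used in error messages.
        final SAMFileHeader header =
                codec.decode(BufferedLineReader.fromString(headerText), "in-memory header");
        System.out.println("sort order: " + header.getSortOrder());
        System.out.println("sequences : " + header.getSequenceDictionary().size());

        // encode(...) writes the SAMFileHeader back out as SAM header text.
        final StringWriter out = new StringWriter();
        codec.encode(out, header);
        System.out.print(out);
    }
}

BufferedLineReader.fromString is the same helper used in Examples 10 and 11 below to decode a header that is stored as a single string.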

Example 1: readSAMFileHeader

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
private static SAMFileHeader readSAMFileHeader(InputStream inputStream,
		final String id) throws IOException {
	final SecramBlock block = SecramBlock.readFromInputStream(inputStream);

	inputStream = new ByteArrayInputStream(block.getRawContent());

	final ByteBuffer buffer = ByteBuffer.allocate(4);
	buffer.order(ByteOrder.LITTLE_ENDIAN);
	for (int i = 0; i < 4; i++)
		buffer.put((byte) inputStream.read());
	buffer.flip();
	final int size = buffer.asIntBuffer().get();

	final DataInputStream dataInputStream = new DataInputStream(inputStream);
	final byte[] bytes = new byte[size];
	dataInputStream.readFully(bytes);

	final BufferedLineReader bufferedLineReader = new BufferedLineReader(
			new ByteArrayInputStream(bytes));
	final SAMTextHeaderCodec codec = new SAMTextHeaderCodec();
	return codec.decode(bufferedLineReader, id);
}
 
Developer ID: acs6610987, Project: secram, Lines of code: 23, Source: SecramIO.java

Example 2: testAddCommentsToBam

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
@Test
public void testAddCommentsToBam() throws Exception {
    final File outputFile = File.createTempFile("addCommentsToBamTest.", BamFileIoUtils.BAM_FILE_EXTENSION);
    outputFile.deleteOnExit();
    runIt(INPUT_FILE, outputFile, commentList);

    final SAMFileHeader newHeader = SamReaderFactory.makeDefault().getFileHeader(outputFile);

    // The original comments are massaged when they're added to the header. Perform the same massaging here,
    // and then compare the lists
    final List<String> massagedComments = new LinkedList<String>();
    for (final String comment : commentList) {
        massagedComments.add(SAMTextHeaderCodec.COMMENT_PREFIX + comment);
    }

    Assert.assertEquals(newHeader.getComments(), massagedComments);
    outputFile.delete();
}
 
Developer ID: broadinstitute, Project: picard, Lines of code: 19, Source: AddCommentsToBamTest.java

Example 3: writeHeader

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
private void writeHeader(final SAMFileHeader header) {
	binaryCodec.writeBytes("BAM\001".getBytes(Charset.forName("UTF8")));

	final Writer sw = new StringWriter();
	new SAMTextHeaderCodec().encode(sw, header);

	binaryCodec.writeString(sw.toString(), true, false);

	final SAMSequenceDictionary dict = header.getSequenceDictionary();

	binaryCodec.writeInt(dict.size());
	for (final SAMSequenceRecord rec : dict.getSequences()) {
		binaryCodec.writeString(rec.getSequenceName(), true, true);
		binaryCodec.writeInt   (rec.getSequenceLength());
	}
}
 
Developer ID: HadoopGenomics, Project: Hadoop-BAM, Lines of code: 17, Source: BAMRecordWriter.java

Example 4: loadFastaDictionary

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * Given an InputStream connected to a fasta dictionary, returns its sequence dictionary
 *
 * Note: does not close the InputStream it's passed
 *
 * @param fastaDictionaryStream InputStream connected to a fasta dictionary
 * @return the SAMSequenceDictionary from the fastaDictionaryStream
 */
public static SAMSequenceDictionary loadFastaDictionary( final InputStream fastaDictionaryStream ) {
    // Don't close the reader when we're done, since we don't want to close the client's InputStream for them
    final BufferedLineReader reader = new BufferedLineReader(fastaDictionaryStream);

    final SAMTextHeaderCodec codec = new SAMTextHeaderCodec();
    final SAMFileHeader header = codec.decode(reader, fastaDictionaryStream.toString());

    // Make sure we have a valid sequence dictionary before continuing:
    if (header.getSequenceDictionary() == null || header.getSequenceDictionary().isEmpty()) {
        throw new UserException.MalformedFile(
                "Could not read sequence dictionary from given fasta stream " +
                        fastaDictionaryStream
        );
    }

    return header.getSequenceDictionary();
}
 
Developer ID: broadinstitute, Project: gatk, Lines of code: 26, Source: ReferenceUtils.java
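
As a small usage sketch for the method above (the file path and class name are hypothetical, and the GATK import path should be verified against your version): because loadFastaDictionary does not close the stream, the caller opens and closes it, here with try-with-resources.

import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
// Assumed GATK 4 package path for the ReferenceUtils class shown above; verify for your version.
import org.broadinstitute.hellbender.utils.reference.ReferenceUtils;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class LoadDictSketch {
    public static void main(final String[] args) throws IOException {
        // Hypothetical path to a sequence dictionary (e.g. produced by Picard CreateSequenceDictionary).
        final String dictPath = "reference.dict";

        // The caller owns the stream: loadFastaDictionary deliberately leaves it open.
        try (InputStream in = new FileInputStream(dictPath)) {
            final SAMSequenceDictionary dict = ReferenceUtils.loadFastaDictionary(in);
            for (final SAMSequenceRecord rec : dict.getSequences()) {
                System.out.println(rec.getSequenceName() + "\t" + rec.getSequenceLength());
            }
        }
    }
}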

Example 5: readSAMHeader

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * Read the SAM header of a SAM file.
 * @param is input stream
 * @return a String with the SAM header
 */
public static String readSAMHeader(final InputStream is) {

  if (is == null) {
    throw new NullPointerException("The input stream is null.");
  }

  // Read SAM file header
  final SamReader reader =
      SamReaderFactory.makeDefault().open(SamInputResource.of(is));
  final SAMFileHeader header = reader.getFileHeader();

  // Close reader
  // reader.close();

  final StringWriter headerTextBuffer = new StringWriter();
  new SAMTextHeaderCodec().encode(headerTextBuffer, header);

  return headerTextBuffer.toString();
}
 
Developer ID: GenomicParisCentre, Project: eoulsan, Lines of code: 25, Source: SAMUtils.java

Example 6: writeHeader

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * Writes the given SAM header to the provided BufferedWriter, then closes the writer.
 *
 * @param header the SAM file header to write
 * @param writer the destination writer
 * @throws IOException if an I/O error occurs while writing
 */
public static void writeHeader(SAMFileHeader header, BufferedWriter writer)
		throws IOException {
	final Writer sw = new StringWriter();
	new SAMTextHeaderCodec().encode(sw, header);

	writer.write(sw.toString());
	writer.close();
}
 
Developer ID: BGI-flexlab, Project: SOAPgaea, Lines of code: 16, Source: SamFileHeaderCodec.java

Example 7: streamElPrep

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
public int streamElPrep(Reducer.Context context, String output, String rg, 
            int threads, SAMRecordIterator SAMit, 
            SAMFileHeader header, String dictFile, boolean updateRG, boolean keepDups, String RGID) throws InterruptedException, IOException, QualityException {
        long startTime = System.currentTimeMillis();
        String customArgs = HalvadeConf.getCustomArgs(context.getConfiguration(), "elprep", "");  
        String[] command = CommandGenerator.elPrep(bin, "/dev/stdin", output, threads, true, rg, null, !keepDups, customArgs);
//        runProcessAndWait(command);
        ProcessBuilderWrapper builder = new ProcessBuilderWrapper(command, null);
        builder.startProcess(true);        
        BufferedWriter localWriter = builder.getSTDINWriter();
        
        // write header
        final StringWriter headerTextBuffer = new StringWriter();
        new SAMTextHeaderCodec().encode(headerTextBuffer, header);
        final String headerText = headerTextBuffer.toString();
        localWriter.write(headerText, 0, headerText.length());
        
        
        SAMRecord sam;
        int reads = 0;
        while(SAMit.hasNext()) {
            sam = SAMit.next();
            if(updateRG)
                sam.setAttribute(SAMTag.RG.name(), RGID);
            String samString = sam.getSAMString();
            localWriter.write(samString, 0, samString.length());
            reads++;
        }
        localWriter.flush();
        localWriter.close();
                
        int error = builder.waitForCompletion();
        if(error != 0)
            throw new ProcessException("elPrep", error);
        long estimatedTime = System.currentTimeMillis() - startTime;
        Logger.DEBUG("estimated time: " + estimatedTime / 1000);
        if(context != null)
            context.getCounter(HalvadeCounters.TIME_ELPREP).increment(estimatedTime);
        return reads;
    }
 
Developer ID: biointec, Project: halvade, Lines of code: 41, Source: PreprocessingTools.java

Example 8: indexedDataSource

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
@DataProvider(name = "indexed")
public Object[][] indexedDataSource() {
    final SAMFileHeader header = new SAMTextHeaderCodec().decode(
            new StringLineReader("@HD\tVN:1.4\tSO:coordinate\n"
                    + "@SQ\tSN:2L\tLN:59940\n"
                    // TODO: this header should be changed by using 2L.fragment.fa
                    + "@PG\tID:bwa\tPN:bwa\tVN:0.7.12-r1039\tCL:bwa mem fragment.fa SRR1931701_1.fq SRR1931701_2.fq"
            ), "testIndexed");
    return new Object[][] {
            {getTestFile("small.mapped.sort.bam").getAbsolutePath(), FastqQualityFormat.Standard, header, 206, 118}
    };
}
 
Developer ID: magicDGS, Project: ReadTools, Lines of code: 13, Source: RTDataSourceUnitTest.java

Example 9: writeBAMHeaderToStream

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * Private helper method for {@link #convertHeaderlessHadoopBamShardToBam} that takes a SAMFileHeader and writes it
 * to the provided `OutputStream`, correctly encoded for the BAM format and preceded by the BAM magic bytes.
 *
 * @param samFileHeader SAM header to write
 * @param outputStream stream to write the SAM header to
 */
private static void writeBAMHeaderToStream( final SAMFileHeader samFileHeader, final OutputStream outputStream ) {
    final BlockCompressedOutputStream blockCompressedOutputStream = new BlockCompressedOutputStream(outputStream, null);
    final BinaryCodec outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));

    final String headerString;
    final Writer stringWriter = new StringWriter();
    new SAMTextHeaderCodec().encode(stringWriter, samFileHeader, true);
    headerString = stringWriter.toString();

    outputBinaryCodec.writeBytes(ReadUtils.BAM_MAGIC);

    // calculate and write the length of the SAM file header text and the header text
    outputBinaryCodec.writeString(headerString, true, false);

    // write the sequences binarily.  This is redundant with the text header
    outputBinaryCodec.writeInt(samFileHeader.getSequenceDictionary().size());
    for (final SAMSequenceRecord sequenceRecord: samFileHeader.getSequenceDictionary().getSequences()) {
        outputBinaryCodec.writeString(sequenceRecord.getSequenceName(), true, true);
        outputBinaryCodec.writeInt(sequenceRecord.getSequenceLength());
    }

    try {
        blockCompressedOutputStream.flush();
    } catch (final IOException ioe) {
        throw new RuntimeIOException(ioe);
    }
}
 
Developer ID: broadinstitute, Project: gatk, Lines of code: 35, Source: SparkUtils.java

Example 10: HDF5SimpleCountCollection

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * DEV NOTE: If you are adding attributes that are neither RealMatrix nor a primitive,
 * you must follow the pattern in the constructor (i.e. the Lazy loading pattern).
 * Otherwise, some operations will hang.
 */
HDF5SimpleCountCollection(final HDF5File file) {
    Utils.nonNull(file, "The input file cannot be null.");
    this.file = file;
    sampleName = new Lazy<>(() -> file.readStringArray(SAMPLE_NAME_PATH)[0]);
    sequenceDictionary = new Lazy<>(() -> {
        final String sequenceDictionaryString = file.readStringArray(SEQUENCE_DICTIONARY_PATH)[0];
        return new SAMTextHeaderCodec()
                .decode(BufferedLineReader.fromString(sequenceDictionaryString), file.getFile().getAbsolutePath())
                .getSequenceDictionary();
    });
    intervals = new Lazy<>(() -> HDF5Utils.readIntervals(file, INTERVALS_GROUP_NAME));
    counts = new Lazy<>(() -> new Array2DRowRealMatrix(file.readDoubleMatrix(COUNTS_PATH)));
}
 
Developer ID: broadinstitute, Project: gatk, Lines of code: 19, Source: HDF5SimpleCountCollection.java

Example 11: HDF5SVDReadCountPanelOfNormals

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * DEV NOTE: If you are adding attributes that are neither RealMatrix nor a primitive,
 * you must follow the pattern in the constructor (i.e. the Lazy loading pattern).
 * Otherwise, some operations will hang.
 */
private HDF5SVDReadCountPanelOfNormals(final HDF5File file) {
    Utils.nonNull(file);
    IOUtils.canReadFile(file.getFile());
    this.file = file;
    sequenceDictionary = new Lazy<>(() -> {
        final String sequenceDictionaryString = file.readStringArray(SEQUENCE_DICTIONARY_PATH)[0];
        return new SAMTextHeaderCodec()
                .decode(BufferedLineReader.fromString(sequenceDictionaryString), file.getFile().getAbsolutePath())
                .getSequenceDictionary();
    });
    originalIntervals = new Lazy<>(() -> HDF5Utils.readIntervals(file, ORIGINAL_INTERVALS_PATH));
    panelIntervals = new Lazy<>(() -> HDF5Utils.readIntervals(file, PANEL_INTERVALS_PATH));
}
 
Developer ID: broadinstitute, Project: gatk, Lines of code: 19, Source: HDF5SVDReadCountPanelOfNormals.java

Example 12: writeHeader

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * Writes a header to a BAM file. Might need to regenerate the String
 * version of the header, if one already has both the samFileHeader and the
 * String, use the version of this method which takes both.
 */
static void writeHeader(final BinaryCodec outputBinaryCodec, final SAMFileHeader samFileHeader) {
	// Do not use SAMFileHeader.getTextHeader() as it is not updated when
	// changes to the underlying object are made
	final String headerString;
	final Writer stringWriter = new StringWriter();
	new SAMTextHeaderCodec().encode(stringWriter, samFileHeader, true);
	headerString = stringWriter.toString();

	writeHeader(outputBinaryCodec, samFileHeader, headerString);
}
 
Developer ID: enasequence, Project: cramtools, Lines of code: 16, Source: SAMFileHeader_Utils.java

Example 13: toByteArray

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
private static byte[] toByteArray(SAMFileHeader samFileHeader) {
	ExposedByteArrayOutputStream headerBodyOS = new ExposedByteArrayOutputStream();
	OutputStreamWriter outStreamWriter = new OutputStreamWriter(headerBodyOS);
	(new SAMTextHeaderCodec()).encode(outStreamWriter, samFileHeader);

	try {
		outStreamWriter.close();
	} catch (IOException var8) {
		throw new RuntimeException(var8);
	}

	ByteBuffer buf = ByteBuffer.allocate(4);
	buf.order(ByteOrder.LITTLE_ENDIAN);
	buf.putInt(headerBodyOS.size());
	buf.flip();
	byte[] bytes = new byte[buf.limit()];
	buf.get(bytes);
	ByteArrayOutputStream headerOS = new ByteArrayOutputStream();

	try {
		headerOS.write(bytes);
		headerOS.write(headerBodyOS.getBuffer(), 0, headerBodyOS.size());
	} catch (IOException var7) {
		throw new RuntimeException(var7);
	}

	return headerOS.toByteArray();
}
 
Developer ID: enasequence, Project: cramtools, Lines of code: 29, Source: TestBAMRecordView.java

Example 14: writeSamText

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/**
 * This is factored out of doWork only for unit testing.
 */
int writeSamText(PrintStream printStream) {
    try {
        final CloseableIterator<SAMRecord> samRecordsIterator;
        final SamReader samReader = SamReaderFactory.makeDefault()
                .referenceSequence(REFERENCE_SEQUENCE)
                .open(SamInputResource.of(INPUT));

        // if we are only using the header or we aren't using intervals, then use the reader as the iterator.
        // otherwise use the SamRecordIntervalIteratorFactory to make an interval-ed iterator
        if (HEADER_ONLY || INTERVAL_LIST == null) {
            samRecordsIterator = samReader.iterator();
        } else {
            IOUtil.assertFileIsReadable(INTERVAL_LIST);

            final List<Interval> intervals = IntervalList.fromFile(INTERVAL_LIST).uniqued().getIntervals();
            samRecordsIterator = new SamRecordIntervalIteratorFactory().makeSamRecordIntervalIterator(samReader, intervals, samReader.hasIndex());
        }
        final AsciiWriter writer = new AsciiWriter(printStream);
        final SAMFileHeader header = samReader.getFileHeader();
        if (!RECORDS_ONLY) {
            if (header.getTextHeader() != null) {
                writer.write(header.getTextHeader());
            } else {
                // Headers that are too large are not retained as text, so need to regenerate text
                new SAMTextHeaderCodec().encode(writer, header, true);
            }
        }
        if (!HEADER_ONLY) {
            while (samRecordsIterator.hasNext()) {
                final SAMRecord rec = samRecordsIterator.next();

                if (printStream.checkError()) {
                    return 1;
                }

                if (this.ALIGNMENT_STATUS == AlignmentStatus.Aligned && rec.getReadUnmappedFlag()) continue;
                if (this.ALIGNMENT_STATUS == AlignmentStatus.Unaligned && !rec.getReadUnmappedFlag()) continue;

                if (this.PF_STATUS == PfStatus.PF && rec.getReadFailsVendorQualityCheckFlag()) continue;
                if (this.PF_STATUS == PfStatus.NonPF && !rec.getReadFailsVendorQualityCheckFlag()) continue;
                writer.write(rec.getSAMString());
            }
        }
        writer.flush();
        if (printStream.checkError()) {
            return 1;
        }
        CloserUtil.close(writer);
        CloserUtil.close(samRecordsIterator);
        return 0;
    } catch (IOException e) {
        throw new PicardException("Exception writing SAM text", e);
    }
}
 
Developer ID: broadinstitute, Project: picard, Lines of code: 58, Source: ViewSam.java

Example 15: writeToFile

import htsjdk.samtools.SAMTextHeaderCodec; // import the required package/class
/** Writes out a HaplotypeMap file with the contents of this map. */
public void writeToFile(final File file) {
    try {
        final BufferedWriter out = new BufferedWriter(new OutputStreamWriter(IOUtil.openFileForWriting(file)));
        final FormatUtil format = new FormatUtil();

        // Write out the header
        if (this.header != null) {
            final SAMTextHeaderCodec codec = new SAMTextHeaderCodec();
            codec.encode(out, this.header);
        }

        // Write the header for the entries.
        out.write("#CHROMOSOME\tPOSITION\tNAME\tMAJOR_ALLELE\tMINOR_ALLELE\tMAF\tANCHOR_SNP\tPANELS");
        out.newLine();

        final List<HaplotypeMapFileEntry> entries = new ArrayList<>();
        for (final HaplotypeBlock block : this.getHaplotypes()) {
            String anchor = null;
            final SortedSet<Snp> snps = new TreeSet<>(block.getSnps());

            for (final Snp snp : snps) {
                entries.add(new HaplotypeMapFileEntry(snp.getChrom(), snp.getPos(), snp.getName(),
                        snp.getAllele1(), snp.getAllele2(), snp.getMaf(), anchor, snp.getFingerprintPanels()));

                if (anchor == null) {
                    anchor = snp.getName();
                }
            }
        }

        Collections.sort(entries);
        for (final HaplotypeMapFileEntry entry : entries) {
            out.write(entry.chromosome + "\t");
            out.write(format.format(entry.position) + "\t");
            out.write(entry.snpName + "\t");
            out.write((char)entry.majorAllele + "\t");
            out.write((char)entry.minorAllele + "\t");
            out.write(format.format(entry.minorAlleleFrequency) + "\t");
            if (entry.anchorSnp != null) {
                out.write(entry.anchorSnp);
            }
            out.write("\t");
            if (entry.getPanels() != null) {
                out.write(entry.getPanels());
            }
            out.newLine();
        }
        out.flush();
        out.close();
    }
    catch (IOException ioe) {
        throw new PicardException("Error writing out haplotype map to file: " + file.getAbsolutePath(), ioe);
    }
}
 
Developer ID: broadinstitute, Project: picard, Lines of code: 56, Source: HaplotypeMap.java


Note: The htsjdk.samtools.SAMTextHeaderCodec examples in this article were collected by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are taken from open-source projects contributed by their authors; copyright remains with the original authors, and redistribution or use should follow each project's license. Please do not republish without permission.