本文整理汇总了Java中htsjdk.samtools.util.BlockCompressedOutputStream.flush方法的典型用法代码示例。如果您正苦于以下问题:Java BlockCompressedOutputStream.flush方法的具体用法?Java BlockCompressedOutputStream.flush怎么用?Java BlockCompressedOutputStream.flush使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类htsjdk.samtools.util.BlockCompressedOutputStream
的用法示例。
在下文中一共展示了BlockCompressedOutputStream.flush方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: blockCompressAndIndex
import htsjdk.samtools.util.BlockCompressedOutputStream; //导入方法依赖的package包/类
/**
* Block compress input file and create associated tabix index. Newly created file and index are
* deleted on exit if deleteOnExit true.
* @throws IOException
* @throws InvalidRecordException
* */
/**
 * Block-compresses the input file to BGZF and creates the associated tabix index
 * (GFF format). The newly created file and its index are scheduled for deletion
 * on JVM exit when {@code deleteOnExit} is true.
 *
 * Fix over the original: the {@code BlockCompressedOutputStream} leaked if any
 * exception was thrown between construction and close (e.g. inside the write
 * loop or by {@code finalizeIndex}); it is now managed with try-with-resources.
 *
 * @param in path/URI of the plain-text input file
 * @param bgzfOut path of the BGZF-compressed output file to create
 * @param deleteOnExit whether to delete the output file and index on JVM exit
 * @throws IOException on read/write failure
 * @throws InvalidRecordException if a record cannot be indexed
 * @throws RuntimeException if an index file already exists at the target path
 * */
private void blockCompressAndIndex(String in, String bgzfOut, boolean deleteOnExit) throws IOException, InvalidRecordException {
    File inFile= new File(in);
    File outFile= new File(bgzfOut);
    // NOTE(review): LineIterator is not closed here (as in the original); confirm
    // whether utils.IOUtils.openURIForLineIterator hands out a closeable resource.
    LineIterator lin= utils.IOUtils.openURIForLineIterator(inFile.getAbsolutePath());
    try (BlockCompressedOutputStream writer = new BlockCompressedOutputStream(outFile)) {
        long filePosition= writer.getFilePointer();
        TabixIndexCreator indexCreator=new TabixIndexCreator(TabixFormat.GFF);
        while(lin.hasNext()){
            String line = lin.next();
            GtfLine gtf= new GtfLine(line.split("\t"));
            // NOTE(review): getBytes() uses the platform default charset (as in the
            // original); consider StandardCharsets.UTF_8 for reproducible output.
            writer.write(line.getBytes());
            writer.write('\n');
            // Index each feature at the virtual file offset where it starts.
            indexCreator.addFeature(gtf, filePosition);
            filePosition = writer.getFilePointer();
        }
        writer.flush();
        // Refuse to clobber a pre-existing index; the writer is closed by
        // try-with-resources even on this early exit.
        File tbi= new File(bgzfOut + TabixUtils.STANDARD_INDEX_EXTENSION);
        if(tbi.exists() && tbi.isFile()){
            throw new RuntimeException("Index file exists: " + tbi);
        }
        Index index = indexCreator.finalizeIndex(writer.getFilePointer());
        index.writeBasedOnFeatureFile(outFile);
    }
    if(deleteOnExit){
        outFile.deleteOnExit();
        File idx= new File(outFile.getAbsolutePath() + TabixUtils.STANDARD_INDEX_EXTENSION);
        idx.deleteOnExit();
    }
}
示例2: writeBAMHeaderToStream
import htsjdk.samtools.util.BlockCompressedOutputStream; //导入方法依赖的package包/类
/**
* Private helper method for {@link #convertHeaderlessHadoopBamShardToBam} that takes a SAMFileHeader and writes it
* to the provided `OutputStream`, correctly encoded for the BAM format and preceded by the BAM magic bytes.
*
* @param samFileHeader SAM header to write
* @param outputStream stream to write the SAM header to
*/
/**
 * Writes the given SAM header to {@code outputStream} in BAM encoding: the BAM
 * magic bytes, the length-prefixed SAM text header, and the binary sequence
 * dictionary (which duplicates the text header's sequence lines). The BGZF
 * stream is flushed but intentionally not closed, so no BGZF terminator block
 * is appended and the caller may keep writing to the underlying stream.
 *
 * @param samFileHeader SAM header to write
 * @param outputStream stream to receive the BAM-encoded header
 * @throws RuntimeIOException if flushing the compressed stream fails
 */
private static void writeBAMHeaderToStream( final SAMFileHeader samFileHeader, final OutputStream outputStream ) {
    // Render the header to SAM text first; this does not touch the output stream.
    final StringWriter textWriter = new StringWriter();
    new SAMTextHeaderCodec().encode(textWriter, samFileHeader, true);
    final String headerText = textWriter.toString();

    final BlockCompressedOutputStream bgzfStream = new BlockCompressedOutputStream(outputStream, null);
    final BinaryCodec codec = new BinaryCodec(new DataOutputStream(bgzfStream));

    codec.writeBytes(ReadUtils.BAM_MAGIC);
    // Length-prefixed header text (no null terminator).
    codec.writeString(headerText, true, false);
    // Binary sequence dictionary: count, then name/length per sequence.
    codec.writeInt(samFileHeader.getSequenceDictionary().size());
    for (final SAMSequenceRecord sequence : samFileHeader.getSequenceDictionary().getSequences()) {
        codec.writeString(sequence.getSequenceName(), true, true);
        codec.writeInt(sequence.getSequenceLength());
    }

    try {
        bgzfStream.flush();
    } catch (final IOException ioe) {
        throw new RuntimeIOException(ioe);
    }
}
示例3: apply
import htsjdk.samtools.util.BlockCompressedOutputStream; //导入方法依赖的package包/类
/**
 * Converts one CRAM container (carried as an {@link OrderedByteArray}) into
 * BGZF-compressed BAM record bytes, preserving the input's ordering tag.
 * Returns {@code null} for an EOF container.
 *
 * @throws NullPointerException if {@code object} is null
 * @throws RuntimeException wrapping any I/O or reflection failure
 */
@Override
public OrderedByteArray apply(OrderedByteArray object) {
    if (object == null) {
        throw new NullPointerException();
    }
    log.debug("processing container " + object.order);
    try {
        final Container container =
                ContainerIO.readContainer(header.getVersion(), new ByteArrayInputStream(object.bytes));
        if (container.isEOF()) {
            return null;
        }

        // Decode and normalize the CRAM records from this container.
        final ArrayList<CramCompressionRecord> cramRecords =
                new ArrayList<CramCompressionRecord>(container.nofRecords);
        parser.getRecords(container, cramRecords, ValidationStringency.SILENT);
        n.normalize(cramRecords, null, 0, container.header.substitutionMatrix);

        // Re-encode every record as BAM into an in-memory BGZF stream.
        final ByteArrayOutputStream bamBytes = new ByteArrayOutputStream();
        final BlockCompressedOutputStream bgzfStream = new BlockCompressedOutputStream(bamBytes, null);
        codec.setOutputStream(bgzfStream);
        for (final CramCompressionRecord cramRecord : cramRecords) {
            codec.encode(f.create(cramRecord));
        }
        // Flush (not close) so the compressed bytes land in bamBytes without a
        // BGZF terminator block.
        bgzfStream.flush();

        final OrderedByteArray result = new OrderedByteArray();
        result.bytes = bamBytes.toByteArray();
        result.order = object.order;
        log.debug(String.format("Converted OBA %d, records %d", object.order, cramRecords.size()));
        return result;
    } catch (IOException | IllegalArgumentException | IllegalAccessException e) {
        throw new RuntimeException(e);
    }
}
示例4: writeHeader
import htsjdk.samtools.util.BlockCompressedOutputStream; //导入方法依赖的package包/类
/**
 * Writes the SAM header to {@code outputStream} in BAM (BGZF-compressed)
 * encoding by delegating to the {@code writeHeader(BinaryCodec, SAMFileHeader)}
 * overload. The compressed stream is flushed but not closed, leaving the
 * underlying stream open for further writes.
 *
 * @param outputStream destination for the BGZF-compressed header
 * @param samFileHeader SAM header to write
 * @throws RuntimeIOException if flushing the compressed stream fails
 */
protected static void writeHeader(final OutputStream outputStream, final SAMFileHeader samFileHeader) {
    final BlockCompressedOutputStream bgzfStream = new BlockCompressedOutputStream(outputStream, null);
    writeHeader(new BinaryCodec(new DataOutputStream(bgzfStream)), samFileHeader);
    try {
        bgzfStream.flush();
    } catch (final IOException ioe) {
        throw new RuntimeIOException(ioe);
    }
}