This article collects typical usage examples of the Java method htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder.build. If you are wondering what VariantContextWriterBuilder.build does, how to call it, or want to see it used in context, the curated samples below should help; they also illustrate the enclosing class htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder.
The following 15 code examples of VariantContextWriterBuilder.build are listed, ordered by popularity.
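Before the individual examples, here is a minimal, hedged sketch of the pattern they all share: configure a VariantContextWriterBuilder, call build(), write a header, add records, and close. The file name and the empty header are purely illustrative.

import java.io.File;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFHeader;

// Minimal usage sketch of VariantContextWriterBuilder.build (illustrative names).
static void writeMinimalVcf() {
    final VariantContextWriter writer = new VariantContextWriterBuilder()
            .setOutputFile(new File("example.vcf"))
            .unsetOption(Options.INDEX_ON_THE_FLY) // no reference dictionary supplied here
            .build();
    writer.writeHeader(new VCFHeader());           // a real header would carry meta-data lines
    // writer.add(variantContext); for each record to emit
    writer.close();
}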
Example 1: createVCFWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
public static VariantContextWriter createVCFWriter(final File outFile, VCFHeader header) {
    Utils.nonNull(outFile);
    VariantContextWriterBuilder vcWriterBuilder = new VariantContextWriterBuilder().clearOptions()
            .setOutputFile(outFile);
    if (lenientVCFProcessing) {
        vcWriterBuilder = vcWriterBuilder.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
    }
    if (createOutputVariantIndex && null != header) {
        vcWriterBuilder = vcWriterBuilder.setOption(Options.INDEX_ON_THE_FLY);
    }
    if (createOutputVariantMD5) {
        vcWriterBuilder.setCreateMD5();
    }
    if (null != header) {
        vcWriterBuilder = vcWriterBuilder.setReferenceDictionary(header.getSequenceDictionary());
    }
    return vcWriterBuilder.build();
}
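A hedged usage sketch for a helper like the one above. The flags it consults (lenientVCFProcessing, createOutputVariantIndex, createOutputVariantMD5) belong to the enclosing class, and the empty header below is illustrative only.

import java.io.File;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;

// Hypothetical caller of the createVCFWriter helper shown above.
static void demoCreateVCFWriter() {
    final File outFile = new File("calls.vcf");
    final VCFHeader header = new VCFHeader(); // illustrative; a real header would carry contig lines etc.
    final VariantContextWriter writer = createVCFWriter(outFile, header);
    writer.writeHeader(header);
    // writer.add(variantContext); for each record
    writer.close();
}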
Example 2: getVariantContextWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
/** Gets the variant context writer if the output VCF is to be written, otherwise empty. */
private Optional<VariantContextWriter> getVariantContextWriter(final VCFFileReader truthReader, final VCFFileReader callReader) {
    if (OUTPUT_VCF) {
        final File outputVcfFile = new File(OUTPUT + OUTPUT_VCF_FILE_EXTENSION);
        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
                .setOutputFile(outputVcfFile)
                .setReferenceDictionary(callReader.getFileHeader().getSequenceDictionary())
                .setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER)
                .setOption(Options.INDEX_ON_THE_FLY);
        final VariantContextWriter writer = builder.build();
        // create the output header
        final List<String> sampleNames = Arrays.asList(OUTPUT_VCF_CALL_SAMPLE_NAME, OUTPUT_VCF_TRUTH_SAMPLE_NAME);
        final Set<VCFHeaderLine> headerLines = new HashSet<>();
        headerLines.addAll(callReader.getFileHeader().getMetaDataInInputOrder());
        headerLines.addAll(truthReader.getFileHeader().getMetaDataInInputOrder());
        headerLines.add(CONTINGENCY_STATE_HEADER_LINE);
        writer.writeHeader(new VCFHeader(headerLines, sampleNames));
        return Optional.of(writer);
    } else {
        return Optional.empty();
    }
}
Example 3: getVariantContextWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
private static VariantContextWriter getVariantContextWriter(final OutputStream outputStream,
                                                            final SAMSequenceDictionary referenceSequenceDictionary) {
    VariantContextWriterBuilder vcWriterBuilder = new VariantContextWriterBuilder()
            .clearOptions()
            .setOutputStream(outputStream);
    if (null != referenceSequenceDictionary) {
        vcWriterBuilder = vcWriterBuilder.setReferenceDictionary(referenceSequenceDictionary);
    }
    // todo: remove this when things are solid?
    vcWriterBuilder = vcWriterBuilder.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
    for (final Options opt : new Options[]{}) {
        vcWriterBuilder = vcWriterBuilder.setOption(opt);
    }
    return vcWriterBuilder.build();
}
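A hedged sketch, not part of the original source, of how a stream-backed factory like the one above might be called from within the same class to emit a VCF on standard output:

import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;

// Hypothetical usage of the stream-based factory above: write an (empty, illustrative)
// header to standard output. Passing null skips setReferenceDictionary, which the
// factory tolerates.
static void demoStreamWriter() {
    final VariantContextWriter writer = getVariantContextWriter(System.out, null);
    writer.writeHeader(new VCFHeader());
    writer.close();
}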
Example 4: openVariantContextWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
/**
 * Return a new {@link VariantContextWriter} that uses the given header, extended through
 * {@link #extendHeaderFields}, and writes that header out before returning.
 *
 * @param header
 *            the VCF header to use for the construction
 * @param fileName
 *            path to output file
 * @param additionalHeaderLines
 *            additional {@link VCFHeaderLine}s to add
 * @param generateIndex
 *            whether or not to generate an index
 * @return a writer for variant contexts with the extended header already written
 */
public static VariantContextWriter openVariantContextWriter(VCFHeader header, String fileName,
        Collection<VCFHeaderLine> additionalHeaderLines, boolean generateIndex) {
    VariantContextWriterBuilder builder = makeBuilder(header);
    builder.setOutputFile(new File(fileName));
    if (!generateIndex)
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    // construct VariantContextWriter and write out header
    VariantContextWriter out = builder.build();
    final VCFHeader updatedHeader = extendHeaderFields(new VCFHeader(header));
    for (VCFHeaderLine headerLine : additionalHeaderLines)
        updatedHeader.addMetaDataLine(headerLine);
    out.writeHeader(updatedHeader);
    return out;
}
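A hedged usage sketch for the helper above; the input file, header line, and output path are illustrative, and on-the-fly indexing of a .vcf.gz output assumes the header carries a sequence dictionary.

import java.io.File;
import java.util.Collections;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFFileReader;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;

// Hypothetical caller of openVariantContextWriter (all names are illustrative).
static void demoOpenVariantContextWriter() {
    final VCFHeader header = new VCFFileReader(new File("input.vcf.gz"), false).getFileHeader();
    final VariantContextWriter out = openVariantContextWriter(
            header,
            "annotated.vcf.gz",
            Collections.singletonList(new VCFHeaderLine("annotator", "example")),
            true /* generateIndex */);
    // out.add(annotatedVariantContext); for each record
    out.close();
}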
Example 5: createVariantContextWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
/**
 * create a VariantContextWriter
 * @param OUT output file, or null to write to stdout
 * @return a new VariantContextWriter
 * @throws IOException
 */
public static VariantContextWriter createVariantContextWriter(final File OUT) throws IOException {
    if (OUT == null) {
        return createVariantContextWriterToStdout();
    } else {
        IOUtil.assertFileIsWritable(OUT);
        final VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        vcwb.setCreateMD5(false);
        vcwb.setReferenceDictionary(null);
        vcwb.clearOptions();
        vcwb.setOutputFile(OUT);
        return new VariantContextWriterDelayedFlush(vcwb.build());
    }
}
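A hedged sketch of calling the factory above; VariantContextWriterDelayedFlush and createVariantContextWriterToStdout are assumed to be the project's own helpers, and the file name is illustrative.

import java.io.File;
import java.io.IOException;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;

// Hypothetical usage: a File argument writes to disk, null routes the VCF to stdout.
static void demoCreateVariantContextWriter() throws IOException {
    final VariantContextWriter fileWriter = createVariantContextWriter(new File("out.vcf"));
    fileWriter.writeHeader(new VCFHeader()); // illustrative empty header
    fileWriter.close();

    final VariantContextWriter stdoutWriter = createVariantContextWriter(null);
    stdoutWriter.writeHeader(new VCFHeader());
    stdoutWriter.close();
}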
Example 6: openVCFWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
private VariantContextWriter openVCFWriter(final File outputFile, final Set<String> samples) {
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder();
    builder.setOutputFile(outputFile);
    builder.clearOptions();
    final VariantContextWriter result = builder.build();
    final VCFHeader header = new VCFHeader(Collections.emptySet(), samples);
    CopyNumberTriStateAllele.addHeaderLinesTo(header);
    EvaluationClass.addHeaderLinesTo(header);
    // Format annotations.
    header.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.GENOTYPE_KEY, 1, VCFHeaderLineType.Character, "Called genotype"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.CALL_QUALITY_KEY, 1, VCFHeaderLineType.Float, "Quality of the call"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.CALLED_SEGMENTS_COUNT_KEY, 1, VCFHeaderLineType.Integer, "Number of called segments that overlap with the truth"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.CALLED_ALLELE_COUNTS_KEY, VCFHeaderLineCount.G, VCFHeaderLineType.Integer, "Called allele count for mixed calls"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.TRUTH_COPY_FRACTION_KEY, 1, VCFHeaderLineType.Float, "Truth copy fraction estimated"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.TRUTH_QUALITY_KEY, 1, VCFHeaderLineType.Float, "Truth call quality"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.EVALUATION_CLASS_KEY, 1, VCFHeaderLineType.Character, "The evaluation class for the call or lack of call. It takes the values of the header key '" + EvaluationClass.VCF_HEADER_KEY + "'"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.TRUTH_GENOTYPE_KEY, 1, VCFHeaderLineType.Character, "The truth genotype"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.CALLED_TARGET_COUNT_KEY, 1, VCFHeaderLineType.Integer, "Number of targets covered by called segments"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VariantEvaluationContext.CALL_QUALITY_KEY, 1, VCFHeaderLineType.Float, "1 - The probability of the event in Phred scale (the maximum if there is more than one segment)"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.GENOTYPE_QUALITY_KEY, 1, VCFHeaderLineType.Integer, "The quality of the call (the maximum if there is more than one segment)"));
    header.addMetaDataLine(new VCFFormatHeaderLine(VCFConstants.GENOTYPE_FILTER_KEY, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Character, "Genotype filters"));
    // Info annotations.
    header.addMetaDataLine(new VCFInfoHeaderLine(VariantEvaluationContext.TRUTH_ALLELE_FREQUENCY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "The frequency of the alternative alleles in the truth callset"));
    header.addMetaDataLine(new VCFInfoHeaderLine(VariantEvaluationContext.TRUTH_ALLELE_NUMBER_KEY, 1, VCFHeaderLineType.Integer, "Total number of called alleles in the truth callset"));
    header.addMetaDataLine(new VCFInfoHeaderLine(VariantEvaluationContext.CALLS_ALLELE_FREQUENCY_KEY, VCFHeaderLineCount.A, VCFHeaderLineType.Float, "The frequency of the alternative alleles in the actual callset"));
    header.addMetaDataLine(new VCFInfoHeaderLine(VariantEvaluationContext.CALLS_ALLELE_NUMBER_KEY, 1, VCFHeaderLineType.Integer, "Total number of called alleles in the actual callset"));
    header.addMetaDataLine(new VCFInfoHeaderLine(VariantEvaluationContext.TRUTH_TARGET_COUNT_KEY, 1, VCFHeaderLineType.Integer, "Number of targets overlapped by this variant"));
    header.addMetaDataLine(new VCFInfoHeaderLine(VCFConstants.END_KEY, 1, VCFHeaderLineType.Integer, "Stop position for the variant"));
    // Filter annotations.
    for (final EvaluationFilter filter : EvaluationFilter.values()) {
        header.addMetaDataLine(new VCFFilterHeaderLine(filter.name(), filter.description));
        header.addMetaDataLine(new VCFFilterHeaderLine(filter.acronym, filter.description));
    }
    header.addMetaDataLine(new VCFFilterHeaderLine(EvaluationFilter.PASS, "Indicates that it passes all filters"));
    result.writeHeader(header);
    return result;
}
Example 7: doWork
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
@Override
protected int doWork() {
    final ProgressLogger progress = new ProgressLogger(LOG, 10000);
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final VCFFileReader reader = new VCFFileReader(INPUT, REQUIRE_INDEX);
    final VCFHeader header = new VCFHeader(reader.getFileHeader());
    final SAMSequenceDictionary sequenceDictionary = header.getSequenceDictionary();
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available in the input file when creating indexed output.");
    }
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    final VariantContextWriter writer = builder.build();
    writer.writeHeader(header);
    final CloseableIterator<VariantContext> iterator = reader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        writer.add(context);
        progress.record(context.getContig(), context.getStart());
    }
    CloserUtil.close(iterator);
    CloserUtil.close(reader);
    writer.close();
    return 0;
}
Example 8: createVCFWriter
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
/**
 * Creates a VariantContextWriter whose output file type is based on the extension of the output file name.
 * The default options set by VariantContextWriterBuilder are cleared before applying ALLOW_MISSING_FIELDS_IN_HEADER (if
 * <code>lenientProcessing</code> is set), followed by the set of options specified by any <code>options</code> args.
 *
 * @param outFile output File for this writer. May not be null.
 * @param referenceDictionary required if on-the-fly indexing is set, otherwise can be null
 * @param createMD5 true if an md5 file should be created
 * @param options variable length list of additional Options to be set for this writer
 * @return VariantContextWriter; must be closed by the caller
 */
public static VariantContextWriter createVCFWriter(
        final File outFile,
        final SAMSequenceDictionary referenceDictionary,
        final boolean createMD5,
        final Options... options) {
    Utils.nonNull(outFile);
    VariantContextWriterBuilder vcWriterBuilder =
            new VariantContextWriterBuilder().clearOptions().setOutputFile(outFile);
    if (VariantContextWriterBuilder.OutputType.UNSPECIFIED == getVariantFileTypeFromExtension(outFile)) {
        // the only way the user has to specify an output type is by file extension, and htsjdk
        // throws if it can't map the file extension to a known vcf type, so fall back to a default
        // of VCF
        logger.warn(String.format(
                "Can't determine output variant file format from output file extension \"%s\". Defaulting to VCF.",
                FilenameUtils.getExtension(outFile.getPath())));
        vcWriterBuilder = vcWriterBuilder.setOutputFileType(VariantContextWriterBuilder.OutputType.VCF);
    }
    if (createMD5) {
        vcWriterBuilder.setCreateMD5();
    }
    if (null != referenceDictionary) {
        vcWriterBuilder = vcWriterBuilder.setReferenceDictionary(referenceDictionary);
    }
    for (Options opt : options) {
        vcWriterBuilder = vcWriterBuilder.setOption(opt);
    }
    return vcWriterBuilder.build();
}
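A hedged usage sketch of the overload above, requesting an indexed, MD5-summed, block-compressed VCF with lenient header handling; the dictionary path and output name are illustrative.

import java.io.File;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.variant.utils.SAMSequenceDictionaryExtractor;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;

// Hypothetical caller of createVCFWriter with explicit Options varargs.
static void demoCreateVCFWriterWithOptions() {
    final SAMSequenceDictionary dict =
            SAMSequenceDictionaryExtractor.extractDictionary(new File("reference.dict").toPath());
    final VariantContextWriter writer = createVCFWriter(
            new File("calls.vcf.gz"),
            dict,
            true, // createMD5
            Options.INDEX_ON_THE_FLY,
            Options.ALLOW_MISSING_FIELDS_IN_HEADER);
    writer.writeHeader(new VCFHeader()); // illustrative; a real header would carry contig lines
    writer.close();
}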
Example 9: openOutputFile
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
private VariantContextWriter openOutputFile() {
    VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setReferenceDictionary(fasta.getSequenceDictionary())
            .setOutputFile(options.getPathOutputVCF());
    if (options.getPathOutputVCF().endsWith(".gz") || options.getPathOutputVCF().endsWith(".bcf"))
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    VariantContextWriter writer = builder.build();
    VCFHeader header = new VCFHeader();
    int i = 0;
    for (SAMSequenceRecord record : fasta.getSequenceDictionary().getSequences()) {
        Map<String, String> mapping = new TreeMap<String, String>();
        mapping.put("ID", record.getSequenceName());
        mapping.put("length", Integer.toString(record.getSequenceLength()));
        header.addMetaDataLine(new VCFContigHeaderLine(mapping, i++));
    }
    header.addMetaDataLine(new VCFSimpleHeaderLine("ALT", "ERROR", "Error in conversion"));
    header.addMetaDataLine(new VCFFilterHeaderLine("PARSE_ERROR",
            "Problem in parsing original HGVS variant string, written out as variant at 1:g.1N>N"));
    header.addMetaDataLine(new VCFInfoHeaderLine("ERROR_MESSAGE", 1, VCFHeaderLineType.String, "Error message"));
    header.addMetaDataLine(new VCFInfoHeaderLine("ORIG_VAR", 1, VCFHeaderLineType.String,
            "Original HGVS variant string from input file to hgvs-to-vcf"));
    writer.writeHeader(header);
    return writer;
}
Example 10: open
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
@Override
public void open(final ExecutionContext executionContext) throws ItemStreamException {
    if (this.filenameFactory == null) throw new ItemStreamException("resource is not defined");
    try {
        final String filename = this.filenameFactory.apply(executionContext);
        if (StringUtil.isBlank(filename)) throw new ItemStreamException("No output file defined.");
        if (LOG.isInfoEnabled()) LOG.info("Opening " + filename + " for writing");
        if (!Arrays.stream(IOUtil.VCF_EXTENSIONS).anyMatch(SUFF -> filename.endsWith(SUFF))) {
            throw new ItemStreamException("Bad extension for a VCF file:" + filename);
        }
        final File vcfFile = new File(filename);
        VCFHeader header = SpringBatchUtils.getVcfHeader(executionContext);
        final VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        vcwb.setOutputFile(vcfFile);
        if (this.reference != null) {
            final SAMSequenceDictionary dic = SAMSequenceDictionaryExtractor.extractDictionary(this.reference);
            vcwb.setReferenceDictionary(dic);
            if (header.getSequenceDictionary() == null) {
                header = new VCFHeader(header);
                header.setSequenceDictionary(dic);
            }
        } else {
            vcwb.setReferenceDictionary(header.getSequenceDictionary());
        }
        vcwb.setCreateMD5(this.createMD5);
        this.vcw = vcwb.build();
        this.vcw.writeHeader(header);
    } catch (final Exception err) {
        priv_close();
        throw new ItemStreamException(err);
    }
}
Example 11: createVariantContextWriterToOutputStream
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
public static VariantContextWriter createVariantContextWriterToOutputStream(final OutputStream ostream) {
    final VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
    vcwb.setCreateMD5(false);
    vcwb.setOutputStream(ostream);
    vcwb.setReferenceDictionary(null);
    vcwb.clearOptions();
    return new VariantContextWriterDelayedFlush(vcwb.build());
}
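A hedged sketch, not from the original project, showing one way such a stream-backed factory could be used, capturing the VCF text in memory (for example in a test); VariantContextWriterDelayedFlush is assumed to be the project's own wrapper.

import java.io.ByteArrayOutputStream;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;

// Hypothetical usage: render a (header-only, illustrative) VCF into a String.
static String demoVcfToString() {
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    final VariantContextWriter writer = createVariantContextWriterToOutputStream(buffer);
    writer.writeHeader(new VCFHeader());
    writer.close(); // closing flushes the delayed-flush wrapper
    return buffer.toString();
}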
Example 12: doWork
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final VCFFileReader reader = new VCFFileReader(INPUT, false);
    final VCFHeader inputVcfHeader = new VCFHeader(reader.getFileHeader().getMetaDataInInputOrder());
    final SAMSequenceDictionary sequenceDictionary = inputVcfHeader.getSequenceDictionary();
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
    }
    final ProgressLogger progress = new ProgressLogger(Log.getInstance(MakeSitesOnlyVcf.class), 10000);
    // Set up the sites-only file writer
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    else
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    final VariantContextWriter writer = builder.build();
    final VCFHeader header = new VCFHeader(inputVcfHeader.getMetaDataInInputOrder(), SAMPLE);
    writer.writeHeader(header);
    // Go through the input, strip the records and write them to the output
    final CloseableIterator<VariantContext> iterator = reader.iterator();
    while (iterator.hasNext()) {
        final VariantContext full = iterator.next();
        final VariantContext site = subsetToSamplesWithOriginalAnnotations(full, SAMPLE);
        writer.add(site);
        progress.record(site.getContig(), site.getStart());
    }
    CloserUtil.close(iterator);
    CloserUtil.close(reader);
    writer.close();
    return 0;
}
Example 13: doWork
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
@Override
protected int doWork() {
    final ProgressLogger progress = new ProgressLogger(log, 10000);
    final List<String> sampleList = new ArrayList<String>();
    INPUT = IOUtil.unrollFiles(INPUT, IOUtil.VCF_EXTENSIONS);
    final Collection<CloseableIterator<VariantContext>> iteratorCollection = new ArrayList<CloseableIterator<VariantContext>>(INPUT.size());
    final Collection<VCFHeader> headers = new HashSet<VCFHeader>(INPUT.size());
    VariantContextComparator variantContextComparator = null;
    SAMSequenceDictionary sequenceDictionary = null;
    if (SEQUENCE_DICTIONARY != null) {
        sequenceDictionary = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(SEQUENCE_DICTIONARY).getFileHeader().getSequenceDictionary();
    }
    for (final File file : INPUT) {
        IOUtil.assertFileIsReadable(file);
        final VCFFileReader fileReader = new VCFFileReader(file, false);
        final VCFHeader fileHeader = fileReader.getFileHeader();
        if (variantContextComparator == null) {
            variantContextComparator = fileHeader.getVCFRecordComparator();
        } else {
            if (!variantContextComparator.isCompatible(fileHeader.getContigLines())) {
                throw new IllegalArgumentException(
                        "The contig entries in input file " + file.getAbsolutePath() + " are not compatible with the others.");
            }
        }
        if (sequenceDictionary == null) sequenceDictionary = fileHeader.getSequenceDictionary();
        if (sampleList.isEmpty()) {
            sampleList.addAll(fileHeader.getSampleNamesInOrder());
        } else {
            if (!sampleList.equals(fileHeader.getSampleNamesInOrder())) {
                throw new IllegalArgumentException("Input file " + file.getAbsolutePath() + " has sample entries that don't match the other files.");
            }
        }
        headers.add(fileHeader);
        iteratorCollection.add(fileReader.iterator());
    }
    if (CREATE_INDEX && sequenceDictionary == null) {
        throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
    }
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setOutputFile(OUTPUT)
            .setReferenceDictionary(sequenceDictionary);
    if (CREATE_INDEX) {
        builder.setOption(Options.INDEX_ON_THE_FLY);
    } else {
        builder.unsetOption(Options.INDEX_ON_THE_FLY);
    }
    final VariantContextWriter writer = builder.build();
    writer.writeHeader(new VCFHeader(VCFUtils.smartMergeHeaders(headers, false), sampleList));
    final MergingIterator<VariantContext> mergingIterator = new MergingIterator<VariantContext>(variantContextComparator, iteratorCollection);
    while (mergingIterator.hasNext()) {
        final VariantContext context = mergingIterator.next();
        writer.add(context);
        progress.record(context.getContig(), context.getStart());
    }
    CloserUtil.close(mergingIterator);
    writer.close();
    return 0;
}
Example 14: doWork
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsReadable(SEQUENCE_DICTIONARY);
    IOUtil.assertFileIsWritable(OUTPUT);
    final SAMSequenceDictionary samSequenceDictionary = SAMSequenceDictionaryExtractor.extractDictionary(SEQUENCE_DICTIONARY.toPath());
    final VCFFileReader fileReader = new VCFFileReader(INPUT, false);
    final VCFHeader fileHeader = fileReader.getFileHeader();
    final VariantContextWriterBuilder builder = new VariantContextWriterBuilder()
            .setReferenceDictionary(samSequenceDictionary)
            .clearOptions();
    if (CREATE_INDEX)
        builder.setOption(Options.INDEX_ON_THE_FLY);
    try {
        builder.setOutputStream(new FileOutputStream(OUTPUT));
    } catch (final FileNotFoundException ex) {
        throw new PicardException("Could not open " + OUTPUT.getAbsolutePath() + ": " + ex.getMessage(), ex);
    }
    final VariantContextWriter vcfWriter = builder.build();
    fileHeader.setSequenceDictionary(samSequenceDictionary);
    vcfWriter.writeHeader(fileHeader);
    final ProgressLogger progress = new ProgressLogger(log, 10000);
    final CloseableIterator<VariantContext> iterator = fileReader.iterator();
    while (iterator.hasNext()) {
        final VariantContext context = iterator.next();
        vcfWriter.add(context);
        progress.record(context.getContig(), context.getStart());
    }
    CloserUtil.close(iterator);
    CloserUtil.close(fileReader);
    vcfWriter.close();
    return 0;
}
Example 15: doVcfToVcf
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder; // import the package/class this method depends on
private int doVcfToVcf(String inputName, final VcfIterator vcfIn, final File outFile) throws IOException {
    SortingVCFWriter sortingVCW = null;
    VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
    VariantContextWriter w = null;
    try {
        SAMSequenceDictionary dict = vcfIn.getHeader().getSequenceDictionary();
        if (dict != null) vcwb.setReferenceDictionary(dict);
        vcwb.setOutputFile(outFile);
        vcwb.setOutputFileType(OutputType.BLOCK_COMPRESSED_VCF);
        w = vcwb.build();
        if (this.sort) {
            LOG.info("Creating a sorting writer");
            sortingVCW = new SortingVCFWriter(w);
            w = sortingVCW;
        }
        w.writeHeader(vcfIn.getHeader());
        SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(vcfIn.getHeader());
        while (vcfIn.hasNext()) {
            w.add(progress.watch(vcfIn.next()));
        }
        progress.finish();
        w.close();
        w = null;
        return RETURN_OK;
    } catch (Exception e) {
        if (outFile.exists() && outFile.isFile()) {
            LOG.warn("Deleting " + outFile);
            outFile.delete();
            File tbi = new File(outFile.getPath() + TabixUtils.STANDARD_INDEX_EXTENSION);
            if (tbi.exists() && tbi.isFile()) tbi.delete();
        }
        LOG.error(e);
        return -1;
    } finally {
        CloserUtil.close(w);
    }
}
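For reference, a hedged sketch (not from the original tool) of the setOutputFileType call used above: it forces block-compressed output regardless of the output file's extension. The file name is illustrative, and on-the-fly indexing is disabled here because no reference dictionary is supplied.

import java.io.File;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFHeader;

// Hedged sketch: write BGZF-compressed VCF even though the file name lacks a .gz extension.
static void demoForcedBlockCompression() {
    final VariantContextWriter w = new VariantContextWriterBuilder()
            .setOutputFile(new File("forced-output.vcf")) // illustrative name, no .gz suffix
            .setOutputFileType(VariantContextWriterBuilder.OutputType.BLOCK_COMPRESSED_VCF)
            .unsetOption(Options.INDEX_ON_THE_FLY)        // no reference dictionary supplied
            .build();
    w.writeHeader(new VCFHeader());
    w.close();
}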
}