This article collects typical usage examples of the Java class htsjdk.samtools.SAMException: what the class is for, how it is used, and what real code that relies on it looks like. SAMException belongs to the htsjdk.samtools package. Fifteen code examples are shown below, sorted by popularity.
Example 1: findReferenceFile
import htsjdk.samtools.SAMException; // import the required package/class
public static ReferenceSequenceFile findReferenceFile(String fileName)
        throws FileNotFoundException {
    ReferenceSequenceFile rsf;
    File refFile = new File(fileName);
    try {
        rsf = ReferenceSequenceFileFactory
                .getReferenceSequenceFile(refFile);
    } catch (SAMException ex) {
        System.err.println("Could not load reference sequence file \""
                + refFile + "\".");
        throw new FileNotFoundException(refFile.toString());
    }
    return rsf;
}
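For context, a minimal call-site sketch (not from the original source), written as if it sits in the same class as findReferenceFile above; the FASTA path is a placeholder. A reference that htsjdk cannot open surfaces as a FileNotFoundException, because the SAMException has already been translated.

import htsjdk.samtools.reference.ReferenceSequence;
import htsjdk.samtools.reference.ReferenceSequenceFile;
import java.io.FileNotFoundException;

static void printContigLengths() throws FileNotFoundException {
    // "reference.fasta" is a placeholder path
    final ReferenceSequenceFile reference = findReferenceFile("reference.fasta");
    ReferenceSequence seq;
    while ((seq = reference.nextSequence()) != null) {
        System.out.println(seq.getName() + ": " + seq.getBases().length + " bp");
    }
}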
Example 2: indexBam
import htsjdk.samtools.SAMException; // import the required package/class
public void indexBam(File bamFile, File baiFile) {
    SAMFileReader.setDefaultValidationStringency(ValidationStringency.SILENT);
    final SamReader bam;
    // input from a normal file
    IOUtil.assertFileIsReadable(bamFile);
    bam = SamReaderFactory.makeDefault().referenceSequence(null)
            .enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS)
            .open(bamFile);
    if (bam.type() != SamReader.Type.BAM_TYPE) {
        throw new SAMException("Input file must be bam file, not sam file.");
    }
    if (!bam.getFileHeader().getSortOrder().equals(SAMFileHeader.SortOrder.coordinate)) {
        throw new SAMException("Input bam file must be sorted by coordinate");
    }
    BAMIndexer.createIndex(bam, baiFile);
    CloserUtil.close(bam);
}
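A hedged call-site sketch (not part of the original code), written as if it lives in the class that declares indexBam; the paths are placeholders and the input is assumed to be a coordinate-sorted BAM.

import htsjdk.samtools.SAMException;
import java.io.File;

void indexSortedBam() {
    final File bam = new File("input.bam");      // placeholder; must be a coordinate-sorted BAM
    final File bai = new File("input.bam.bai");  // conventional index name next to the BAM
    try {
        indexBam(bam, bai);
    } catch (final SAMException e) {
        // thrown above for SAM input or for a BAM not sorted by coordinate
        System.err.println("Cannot index: " + e.getMessage());
    }
}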
Example 3: PicardIndexedFastaSequenceFile
import htsjdk.samtools.SAMException; // import the required package/class
/**
 * Open the given indexed fasta sequence file. Throw an exception if the file cannot be opened.
 * @param path The file to open.
 * @param index Pre-built FastaSequenceIndex, for the case in which one does not exist on disk.
 */
public PicardIndexedFastaSequenceFile(final Path path, final FastaSequenceIndex index) {
    super(path);
    if (index == null) throw new IllegalArgumentException("Null index for fasta " + path);
    this.index = index;
    IOUtil.assertFileIsReadable(path);
    try {
        this.channel = Files.newByteChannel(path);
    } catch (IOException e) {
        throw new SAMException("Fasta file should be readable but is not: " + path, e);
    }
    reset();
    if (getSequenceDictionary() != null)
        sanityCheckDictionaryAgainstIndex(path.toAbsolutePath().toString(), sequenceDictionary, index);
}
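A construction sketch for the class above (not from the original source): the .fai index is loaded explicitly with htsjdk's FastaSequenceIndex, the paths are placeholders, and a SAMException indicates the FASTA could not be opened.

import htsjdk.samtools.SAMException;
import htsjdk.samtools.reference.FastaSequenceIndex;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;

static void openIndexedFasta() {
    final Path fasta = Paths.get("reference.fasta");                                          // placeholder
    final FastaSequenceIndex index = new FastaSequenceIndex(new File("reference.fasta.fai")); // placeholder
    try {
        final PicardIndexedFastaSequenceFile ref = new PicardIndexedFastaSequenceFile(fasta, index);
        System.out.println("First contig: " + ref.nextSequence().getName());
    } catch (final SAMException e) {
        System.err.println("Could not open FASTA: " + e.getMessage());
    }
}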
Example 4: sanityCheckDictionaryAgainstIndex
import htsjdk.samtools.SAMException; // import the required package/class
/**
 * Do some basic checking to make sure the dictionary and the index match.
 * @param fastaFile Used for error reporting only.
 * @param sequenceDictionary sequence dictionary to check against the index.
 * @param index index file to check against the dictionary.
 */
protected static void sanityCheckDictionaryAgainstIndex(final String fastaFile,
                                                        final SAMSequenceDictionary sequenceDictionary,
                                                        final FastaSequenceIndex index) {
    // Make sure dictionary and index are the same size.
    if (sequenceDictionary.getSequences().size() != index.size())
        throw new SAMException("Sequence dictionary and index contain different numbers of contigs");
    Iterator<SAMSequenceRecord> sequenceIterator = sequenceDictionary.getSequences().iterator();
    Iterator<FastaSequenceIndexEntry> indexIterator = index.iterator();
    while (sequenceIterator.hasNext() && indexIterator.hasNext()) {
        SAMSequenceRecord sequenceEntry = sequenceIterator.next();
        FastaSequenceIndexEntry indexEntry = indexIterator.next();
        if (!sequenceEntry.getSequenceName().equals(indexEntry.getContig())) {
            throw new SAMException(String.format("Mismatch between sequence dictionary fasta index for %s, sequence '%s' != '%s'.",
                    fastaFile, sequenceEntry.getSequenceName(), indexEntry.getContig()));
        }
        // Make sure sequence length matches index length.
        if (sequenceEntry.getSequenceLength() != indexEntry.getSize())
            throw new SAMException("Index length does not match dictionary length for contig: " + sequenceEntry.getSequenceName());
    }
}
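A call-site sketch (not from the original source), written as if it sits in the declaring class or a subclass, since the check is protected static; the .dict and .fai paths are placeholders, and a SAMException reports any disagreement.

import htsjdk.samtools.SAMException;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.reference.FastaSequenceIndex;
import htsjdk.variant.utils.SAMSequenceDictionaryExtractor;
import java.io.File;

static void checkReferenceCompanions() {
    final SAMSequenceDictionary dict =
            SAMSequenceDictionaryExtractor.extractDictionary(new File("reference.dict").toPath()); // placeholder .dict
    final FastaSequenceIndex index = new FastaSequenceIndex(new File("reference.fasta.fai"));      // placeholder .fai
    try {
        sanityCheckDictionaryAgainstIndex("reference.fasta", dict, index);
        System.out.println("Dictionary and index agree.");
    } catch (final SAMException e) {
        // contig count, name, or length mismatch between the .dict and the .fai
        System.err.println("Dictionary/index mismatch: " + e.getMessage());
    }
}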
Example 5: getReadsFromHadoopBam
import htsjdk.samtools.SAMException; // import the required package/class
/**
 * Ingests a BAM file from a Hadoop file system and loads it into a
 * <code>PCollection<GATKRead></code>.
 * @param pipeline a configured Pipeline
 * @param intervals intervals to select reads from
 * @param stringency how to handle SAMExceptions raised while converting records
 * @param bam Hadoop file path to read from
 * @return a <code>PCollection<GATKRead></code> with all the reads that overlap the
 *         given intervals in the BAM file
 */
@SuppressWarnings("unchecked")
public static PCollection<GATKRead> getReadsFromHadoopBam(final Pipeline pipeline, final List<SimpleInterval> intervals, final ValidationStringency stringency, final String bam) {
    PCollection<KV<LongWritable, SAMRecordWritable>> input = pipeline.apply(
            HadoopIO.Read.from(bam, AnySAMInputFormat.class, LongWritable.class, SAMRecordWritable.class));
    return input.apply(ParDo.of(new DoFn<KV<LongWritable, SAMRecordWritable>, GATKRead>() {
        private static final long serialVersionUID = 1L;

        @Override
        public void processElement(ProcessContext c) throws Exception {
            SAMRecord sam = c.element().getValue().get();
            if (samRecordOverlaps(sam, intervals)) {
                try {
                    c.output(new SAMRecordToGATKReadAdapter(sam));
                } catch (SAMException e) {
                    if (stringency == ValidationStringency.STRICT) {
                        throw e;
                    } else if (stringency == ValidationStringency.LENIENT) {
                        logger.info("getReadsFromHadoopBam: " + e.getMessage());
                    }
                    // do nothing if silent
                }
            }
        }
    }));
}
Example 6: getReaderIntervalIterator
import htsjdk.samtools.SAMException; // import the required package/class
/** Gets the interval iterator from a fresh reader, including unmapped reads. */
@Override
protected Iterator<GATKRead> getReaderIntervalIterator(final SamReader reader,
        final List<SimpleInterval> locs) {
    try {
        return new SAMRecordToReadIterator(new SamReaderQueryingIterator(reader, locs, false));
    } catch (SAMException | UserException e) {
        throw new UnsupportedOperationException(e.getMessage(), e);
    }
}
Example 7: openSAMWriter
import htsjdk.samtools.SAMException; // import the required package/class
/** Open a new SAM/BAM/CRAM writer from a Path. */
public SAMFileWriter openSAMWriter(final SAMFileHeader header, final boolean presorted,
        final Path output) {
    checkOutputAndCreateDirs(output);
    try {
        return samFactory.makeWriter(header, presorted, output.toFile(), referenceFile);
    } catch (final SAMException e) {
        // catch SAM exceptions as IO errors -> these are the ones that may fail
        throw new UserException.CouldNotCreateOutputFile(output.toFile(), e.getMessage(), e);
    }
}
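For comparison, a hedged sketch (not from the original source) using the underlying htsjdk API that a wrapper like openSAMWriter builds on: SAMFileWriterFactory.makeWriter picks SAM/BAM/CRAM from the output extension, and failures surface as SAMException. Paths are placeholders; the reference file is only needed for CRAM output.

import htsjdk.samtools.*;
import java.io.File;

public class CopyBamDemo {
    public static void main(String[] args) throws Exception {
        final File input = new File("input.bam");    // placeholder
        final File output = new File("output.bam");  // placeholder; use .cram plus a reference for CRAM
        final File reference = null;                 // required only when writing CRAM
        try (SamReader in = SamReaderFactory.makeDefault().open(input)) {
            try (SAMFileWriter out = new SAMFileWriterFactory()
                    .makeWriter(in.getFileHeader(), true, output, reference)) {
                for (final SAMRecord rec : in) {
                    out.addAlignment(rec);
                }
            } catch (final SAMException e) {
                // mirror the wrapper above: treat writer-creation failures as output errors
                System.err.println("Could not create " + output + ": " + e.getMessage());
            }
        }
    }
}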
Example 8: getSequentialFileList
import htsjdk.samtools.SAMException; // import the required package/class
/**
 * Get a list of FASTQs that are sequentially numbered based on the first (base) fastq.
 * The files should be named:
 *   <prefix>_001.<extension>, <prefix>_002.<extension>, ..., <prefix>_XYZ.<extension>
 * The base file should be:
 *   <prefix>_001.<extension>
 * An example would be:
 *   RUNNAME_S8_L005_R1_001.fastq
 *   RUNNAME_S8_L005_R1_002.fastq
 *   RUNNAME_S8_L005_R1_003.fastq
 *   RUNNAME_S8_L005_R1_004.fastq
 * where `baseFastq` is the first in that list.
 */
protected static List<File> getSequentialFileList(final File baseFastq) {
    final List<File> files = new ArrayList<File>();
    files.add(baseFastq);

    // Find the correct extension used in the base FASTQ
    FastqExtensions fastqExtensions = null;
    String suffix = null; // store the suffix including the extension
    for (final FastqExtensions ext : FastqExtensions.values()) {
        suffix = "_001" + ext.getExtension();
        if (baseFastq.getAbsolutePath().endsWith(suffix)) {
            fastqExtensions = ext;
            break;
        }
    }
    if (null == fastqExtensions) {
        throw new PicardException(String.format("Could not parse the FASTQ extension (expected '_001' + '%s'): %s", FastqExtensions.values().toString(), baseFastq));
    }

    // Find all the files
    for (int idx = 2; true; idx++) {
        String fastq = baseFastq.getAbsolutePath();
        fastq = String.format("%s_%03d%s", fastq.substring(0, fastq.length() - suffix.length()), idx, fastqExtensions.getExtension());
        try {
            IOUtil.assertFileIsReadable(new File(fastq));
        } catch (final SAMException e) { // the file is not readable, so do not continue
            break;
        }
        files.add(new File(fastq));
    }
    return files;
}
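A call-site sketch (not from the original source), written as if it sits in the class that declares getSequentialFileList; the base FASTQ name is a placeholder. The returned list starts with the _001 file and stops at the first sequentially numbered file that is missing or unreadable.

import java.io.File;
import java.util.List;

static void printSequentialFastqs() {
    final File base = new File("RUNNAME_S8_L005_R1_001.fastq"); // placeholder base FASTQ
    final List<File> fastqs = getSequentialFileList(base);
    for (final File f : fastqs) {
        System.out.println(f.getName());
    }
}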
Example 9: forFile
import htsjdk.samtools.SAMException; // import the required package/class
static IntervalListInputType forFile(final File intervalListExtractable) {
    for (final IntervalListInputType intervalListInputType : IntervalListInputType.values()) {
        for (final String s : intervalListInputType.applicableExtensions) {
            if (intervalListExtractable.getName().endsWith(s)) {
                return intervalListInputType;
            }
        }
    }
    throw new SAMException("Cannot figure out type of file " + intervalListExtractable.getAbsolutePath() + " from extension. Current implementation understands the following types: " + Arrays.toString(IntervalListInputType.values()));
}
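A call-site sketch (not from the original source), written as if it sits next to the IntervalListInputType enum since forFile has package visibility; an unrecognized extension is reported through the SAMException message.

import htsjdk.samtools.SAMException;
import java.io.File;

static void reportIntervalFileType(final File intervalFile) {
    try {
        final IntervalListInputType type = IntervalListInputType.forFile(intervalFile);
        System.out.println(intervalFile.getName() + " -> " + type);
    } catch (final SAMException e) {
        // extension not matched by any IntervalListInputType
        System.err.println(e.getMessage());
    }
}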
Example 10: readInvalidValuesForSAMException
import htsjdk.samtools.SAMException; // import the required package/class
@Test(dataProvider = "failingFilesForSAMException", expectedExceptions = SAMException.class)
public void readInvalidValuesForSAMException(final String failingFile) {
    final FilterFileReader reader = new FilterFileReader(new File(TEST_DATA_DIR, failingFile));
    while (reader.hasNext()) {
        reader.next();
    }
}
Example 11: basedirDoesntExistTest
import htsjdk.samtools.SAMException; // import the required package/class
@Test(expectedExceptions = SAMException.class)
public void basedirDoesntExistTest() {
    final String[] args = makeCheckerArgs(new File("a_made_up_file/in_some_weird_location"), 1, "76T76T",
            new IlluminaDataType[]{IlluminaDataType.Position},
            new ArrayList<>(), false, false);
    runPicardCommandLine(args);
}
Example 12: testBadGroupedFileOutputPerRg
import htsjdk.samtools.SAMException; // import the required package/class
@Test(dataProvider = "badGroupedFiles", expectedExceptions = SAMException.class)
public void testBadGroupedFileOutputPerRg(final String samFilename) throws IOException {
    convertFile(new String[]{
            "INPUT=" + TEST_DATA_DIR + "/" + samFilename,
            "OUTPUT_DIR=" + IOUtil.getDefaultTmpDir().getAbsolutePath() + "/",
            "OUTPUT_PER_RG=true"
    });
}
Example 13: testBadData
import htsjdk.samtools.SAMException; // import the required package/class
@Test(dataProvider = "badData", expectedExceptions = {MalformedFeatureFile.class, SAMException.class})
public void testBadData(final File inputVcf,
                        final File outputLoc,
                        final File genotypesFile,
                        final File haplotypeFile) {
    String[] args = new String[]{
            "I=" + inputVcf,
            "O=" + outputLoc,
            "G=" + genotypesFile,
            "H=" + haplotypeFile
    };
    Assert.assertEquals(runPicardCommandLine(args), 0);
}
Example 14: setupBuilder
import htsjdk.samtools.SAMException; // import the required package/class
@BeforeTest
void setupBuilder() throws IOException {
    tempSamFileChrM_O = File.createTempFile("CollectGcBias", ".bam", TEST_DIR);
    tempSamFileAllChr = File.createTempFile("CollectGcBias", ".bam", TEST_DIR);
    tempSamFileChrM_O.deleteOnExit();
    tempSamFileAllChr.deleteOnExit();

    final File tempSamFileUnsorted = File.createTempFile("CollectGcBias", ".bam", TEST_DIR);
    tempSamFileUnsorted.deleteOnExit();

    final SAMFileHeader header = new SAMFileHeader();
    try {
        header.setSequenceDictionary(SAMSequenceDictionaryExtractor.extractDictionary(dict.toPath()));
        header.setSortOrder(SAMFileHeader.SortOrder.unsorted);
    } catch (final SAMException e) {
        e.printStackTrace();
    }

    // build different levels to put into the same bam file for testing multi level collection
    setupTest1(1, readGroupId1, readGroupRecord1, sample1, library1, header, setBuilder1); // Sample 1, Library 1, RG 1
    setupTest1(2, readGroupId2, readGroupRecord2, sample1, library2, header, setBuilder2); // Sample 1, Library 2, RG 2
    setupTest1(3, readGroupId3, readGroupRecord3, sample2, library3, header, setBuilder3); // Sample 2, Library 3, RG 3

    // build one last readgroup for comparing that window count stays the same whether you use all contigs or not
    setupTest2(1, readGroupId1, readGroupRecord1, sample1, library1, header, setBuilder4);

    final List<SAMRecordSetBuilder> test1Builders = new ArrayList<SAMRecordSetBuilder>();
    test1Builders.add(setBuilder1);
    test1Builders.add(setBuilder2);
    test1Builders.add(setBuilder3);

    final List<SAMRecordSetBuilder> test2Builders = new ArrayList<SAMRecordSetBuilder>();
    test2Builders.add(setBuilder4);

    tempSamFileChrM_O = build(test1Builders, tempSamFileUnsorted, header);
    tempSamFileAllChr = build(test2Builders, tempSamFileUnsorted, header);
}
Example 15: doTest
import htsjdk.samtools.SAMException; // import the required package/class
private void doTest(final String inputBed, final String header) throws IOException, SAMException {
    final File outputFile = File.createTempFile("bed_to_interval_list_test.", ".interval_list");
    outputFile.deleteOnExit();

    final BedToIntervalList program = new BedToIntervalList();
    final File inputBedFile = new File(TEST_DATA_DIR, inputBed);
    program.INPUT = inputBedFile;
    program.SEQUENCE_DICTIONARY = new File(TEST_DATA_DIR, header);
    program.OUTPUT = outputFile;
    program.UNIQUE = true;
    program.doWork();

    // Assert they are equal
    IOUtil.assertFilesEqual(new File(inputBedFile.getAbsolutePath() + ".interval_list"), outputFile);
}