This article collects typical usage examples of the Java class org.broadinstitute.hellbender.utils.LoggingUtils. If you are unsure what LoggingUtils is for, or how and where to use it, the selected class code examples below may help.
The LoggingUtils class belongs to the org.broadinstitute.hellbender.utils package. Twelve code examples of the class are presented below, sorted roughly by popularity.
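Before the individual examples, here is a minimal, self-contained sketch of the call that appears in almost every example below: LoggingUtils.setLoggingLevel takes an htsjdk Log.LogLevel and propagates that verbosity to the underlying logging frameworks (see the inline comment in Example 11). The class name LoggingLevelQuickStart is illustrative only and is not part of the GATK code base.

import htsjdk.samtools.util.Log;
import org.broadinstitute.hellbender.utils.LoggingUtils;

public final class LoggingLevelQuickStart {
    public static void main(final String[] args) {
        // Turn on verbose output while investigating a problem...
        LoggingUtils.setLoggingLevel(Log.LogLevel.DEBUG);
        // ... run the code under investigation here ...

        // ...and restore a quieter verbosity afterwards.
        LoggingUtils.setLoggingLevel(Log.LogLevel.ERROR);
    }
}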
Example 1: dataBiasCorrection
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@DataProvider(name = "biasCorrection")
public Object[][] dataBiasCorrection() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    final AllelicCountCollection sampleNormal = new AllelicCountCollection(SAMPLE_NORMAL_FILE);
    final AllelicCountCollection sampleWithBadSNPs = new AllelicCountCollection(SAMPLE_WITH_BAD_SNPS_FILE);
    final AllelicCountCollection sampleWithEvent = new AllelicCountCollection(SAMPLE_WITH_EVENT_FILE);
    final AllelicPanelOfNormals allelicPoNNormal = new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_NORMAL_COUNTS_FILE));
    final AllelicPanelOfNormals allelicPoNWithBadSNPs = new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_WITH_BAD_SNPS_COUNTS_FILE));
    final double minorFractionExpectedInMiddleSegmentNormal = 0.5;
    final double minorFractionExpectedInMiddleSegmentWithBadSNPsAndNormalPoN = 0.4;
    final double minorFractionExpectedInMiddleSegmentWithEvent = 0.33;
    return new Object[][]{
            {sampleNormal, allelicPoNNormal, minorFractionExpectedInMiddleSegmentNormal},
            {sampleWithBadSNPs, allelicPoNNormal, minorFractionExpectedInMiddleSegmentWithBadSNPsAndNormalPoN},
            {sampleWithEvent, allelicPoNNormal, minorFractionExpectedInMiddleSegmentWithEvent},
            {sampleWithBadSNPs, allelicPoNWithBadSNPs, minorFractionExpectedInMiddleSegmentNormal}
    };
}
Example 2: testPoNHyperparameterInitialization
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@Test
public void testPoNHyperparameterInitialization() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    final AllelicPanelOfNormals allelicPoN = new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_NORMAL_COUNTS_FILE));
    final SimpleInterval firstSite = new SimpleInterval("1", 1, 1);
    final SimpleInterval siteNotInPoN = new SimpleInterval("2", 1, 1); // all sites in PoN are from chr1
    // test initialization of hyperparameters for first site in PoN (a = 1218, r = 1317)
    final double alphaAtFirstSite = allelicPoN.getAlpha(firstSite);
    final double betaAtFirstSite = allelicPoN.getBeta(firstSite);
    Assert.assertEquals(alphaAtFirstSite, ALPHA_EXPECTED_AT_FIRST_SITE, DELTA);
    Assert.assertEquals(betaAtFirstSite, BETA_EXPECTED_AT_FIRST_SITE, DELTA);
    // test initialization of MLE hyperparameters (which are default values for sites not in PoN)
    final double alphaNotInPoN = allelicPoN.getAlpha(siteNotInPoN);
    final double betaNotInPoN = allelicPoN.getBeta(siteNotInPoN);
    final double meanBias = allelicPoN.getGlobalMeanBias();
    final double biasVariance = allelicPoN.getGlobalBiasVariance();
    Assert.assertEquals(alphaNotInPoN, MLE_ALPHA_EXPECTED, DELTA);
    Assert.assertEquals(betaNotInPoN, MLE_BETA_EXPECTED, DELTA);
    Assert.assertEquals(meanBias, MLE_MEAN_BIAS_EXPECTED, DELTA);
    Assert.assertEquals(biasVariance, MLE_BIAS_VARIANCE_EXPECTED, DELTA);
}
Example 3: testPrintStackTrace
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@Test(singleThreaded = true)
public void testPrintStackTrace() throws Exception {
    try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
         final PrintStream printStream = new PrintStream(outputStream)) {
        // set the main class with the custom exception output
        final Main main = new Main();
        main.exceptionOutput = printStream;
        // set to debug mode and try to print the stack trace
        LoggingUtils.setLoggingLevel(Log.LogLevel.DEBUG);
        main.printStackTrace(new RuntimeException());
        // assert non-empty stack-trace message
        Assert.assertFalse(main.exceptionOutput.toString().isEmpty());
    } finally {
        // set back to normal verbosity
        setTestVerbosity();
    }
}
Example 4: logConfigFields
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
/**
 * Logs all the parameters in the given {@link Config} object at the given {@link Log.LogLevel}.
 * @param config A {@link Config} object from which to log all parameters and values.
 * @param logLevel The log {@link htsjdk.samtools.util.Log.LogLevel} at which to log the data in {@code config}.
 * @param <T> Any {@link Config} type from which to log all configuration information.
 */
public static <T extends Config> void logConfigFields(final T config, final Log.LogLevel logLevel) {
    Utils.nonNull(config);
    Utils.nonNull(logLevel);
    final Level level = LoggingUtils.levelToLog4jLevel(logLevel);
    // Only continue in this method if we would actually log at the given level:
    if ( !logger.isEnabled(level) ) {
        return;
    }
    logger.log(level, "Configuration file values: ");
    for ( final Map.Entry<String, Object> entry : getConfigMap(config, false).entrySet() ) {
        logger.log(level, "\t" + entry.getKey() + " = " + entry.getValue());
    }
}
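The LoggingUtils-specific detail in Example 4 is levelToLog4jLevel, which converts the htsjdk Log.LogLevel into a log4j Level so that the logger can be asked up front whether the messages would be emitted at all. Below is a minimal, self-contained sketch of that gating pattern; the class name LevelGatingSketch and the helper buildExpensiveMessage are illustrative, not taken from the repository.

import htsjdk.samtools.util.Log;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.broadinstitute.hellbender.utils.LoggingUtils;

public final class LevelGatingSketch {
    private static final Logger logger = LogManager.getLogger(LevelGatingSketch.class);

    public static void logExpensiveDetails(final Log.LogLevel logLevel) {
        // Convert the htsjdk level to its log4j equivalent, as Example 4 does.
        final Level level = LoggingUtils.levelToLog4jLevel(logLevel);
        // Skip building the message entirely when it would be discarded.
        if (!logger.isEnabled(level)) {
            return;
        }
        logger.log(level, "Details: " + buildExpensiveMessage());
    }

    private static String buildExpensiveMessage() {
        return "..."; // placeholder for work only worth doing when it will actually be logged
    }
}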
Example 5: testCreate
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@Test(dataProvider = "dataCreate")
public void testCreate(final double siteFrequencyThreshold, final AllelicPanelOfNormals expected) {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    final AllelicPanelOfNormalsCreator allelicPoNCreator = new AllelicPanelOfNormalsCreator(PULLDOWN_FILES);
    final AllelicPanelOfNormals result = allelicPoNCreator.create(siteFrequencyThreshold);
    AllelicPoNTestUtils.assertAllelicPoNsEqual(result, expected);
}
Example 6: addTest
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@Test
void addTest() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.DEBUG);
    final LongBloomFilter bloomFilter = new LongBloomFilter(testVals.length, FPP);
    for (final long val : testVals) {
        bloomFilter.add(val);
    }
    Assert.assertTrue(bloomFilter.containsAll(testVals));
}
Example 7: setTestVerbosity
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@BeforeSuite @Override
public void setTestVerbosity() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
}
Author: broadinstitute | Project: gatk-protected | Lines: 5 | Source: TargetCoverageSexGenotypeCalculatorUnitTest.java
Example 8: doBeforeTest
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@BeforeTest
private void doBeforeTest() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
}
Example 9: setTestVerbosity
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
/** All the tests will have only the error verbosity. */
@BeforeSuite
public void setTestVerbosity() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.ERROR);
}
Example 10: setTestVerbosity
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
@BeforeSuite
public void setTestVerbosity() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.WARNING);
}
Example 11: instanceMainPostParseArgs
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
public Object instanceMainPostParseArgs() {
    // Provide one temp directory if the caller didn't
    if (this.TMP_DIR == null) this.TMP_DIR = new ArrayList<>();
    if (this.TMP_DIR.isEmpty()) TMP_DIR.add(IOUtil.getDefaultTmpDir());
    // Build the default headers
    final ZonedDateTime startDateTime = ZonedDateTime.now();
    this.defaultHeaders.add(new StringHeader(commandLine));
    this.defaultHeaders.add(new StringHeader("Started on: " + Utils.getDateTimeForDisplay(startDateTime)));
    LoggingUtils.setLoggingLevel(VERBOSITY); // propagate the VERBOSITY level to logging frameworks
    for (final File f : TMP_DIR) {
        // Intentionally not checking the return values, because it may be that the program does not
        // need a tmp_dir. If this fails, the problem will be discovered downstream.
        if (!f.exists()) f.mkdirs();
        f.setReadable(true, false);
        f.setWritable(true, false);
        System.setProperty("java.io.tmpdir", f.getAbsolutePath()); // in loop so that last one takes effect
    }
    // Set defaults (note: setting them here means they are not controllable by the user)
    if (! useJdkDeflater) {
        BlockCompressedOutputStream.setDefaultDeflaterFactory(new IntelDeflaterFactory());
    }
    if (! useJdkInflater) {
        BlockGunzipper.setDefaultInflaterFactory(new IntelInflaterFactory());
    }
    BucketUtils.setGlobalNIODefaultOptions(NIO_MAX_REOPENS);
    if (!QUIET) {
        printStartupMessage(startDateTime);
    }
    try {
        return runTool();
    } finally {
        // Emit the time even if program throws
        if (!QUIET) {
            final ZonedDateTime endDateTime = ZonedDateTime.now();
            final double elapsedMinutes = (Duration.between(startDateTime, endDateTime).toMillis()) / (1000d * 60d);
            final String elapsedString = new DecimalFormat("#,##0.00").format(elapsedMinutes);
            System.err.println("[" + Utils.getDateTimeForDisplay(endDateTime) + "] " +
                    getClass().getName() + " done. Elapsed time: " + elapsedString + " minutes.");
            System.err.println("Runtime.totalMemory()=" + Runtime.getRuntime().totalMemory());
        }
    }
}
Example 12: testBiasCorrection
import org.broadinstitute.hellbender.utils.LoggingUtils; // import the required package/class
/**
 * Tests that the allelic PoN is appropriately used to correct reference bias. The basic setup for the test data is
 * simulated hets at 1000 sites (1:1-1000) across 3 segments. The outer two segments are balanced with
 * minor-allele fraction = 0.5; however, in the middle segment consisting of 100 sites (1:451-550), all of the sites,
 * depending on the sample:
 *
 * <p>
 * 1) are balanced and have biases identical to the sites in the other two segments,
 * which are drawn from a gamma distribution with alpha = 65, beta = 60 -> mean bias = 1.083 ("SAMPLE_NORMAL"),
 * </p>
 *
 * <p>
 * 2) are balanced and have relatively high biases,
 * which are drawn from a gamma distribution with alpha = 9, beta = 6 -> mean bias = 1.5 ("SAMPLE_WITH_BAD_SNPS"), or
 * </p>
 *
 * <p>
 * 3) have minor-allele fraction = 0.33, copy ratio = 1.5, and biases identical to the sites in the other two segments,
 * which are drawn from a gamma distribution with alpha = 65, beta = 60 -> mean bias = 1.083 ("SAMPLE_EVENT").
 * </p>
 *
 * In this segment, using a PoN that doesn't know about the high reference bias of these sites ("ALLELIC_PON_NORMAL"),
 * we should infer a minor-allele fraction of 6 / (6 + 9) = 0.40 in scenario 2; however, with a PoN that does know
 * about the high bias at these sites ("ALLELIC_PON_WITH_BAD_SNPS"), we correctly infer that all of the segments are balanced.
 *
 * <p>
 * Note that alpha and beta are not actually correctly recovered in this PoN via MLE, because the biases are
 * drawn from a mixture of gamma distributions (as opposed to a single gamma distribution as assumed in the model).
 * TODO https://github.com/broadinstitute/gatk-protected/issues/421
 * </p>
 */
@Test(dataProvider = "biasCorrection")
public void testBiasCorrection(final AllelicCountCollection sample,
                               final AllelicPanelOfNormals allelicPoN,
                               final double minorFractionExpectedInMiddleSegment) {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    final JavaSparkContext ctx = SparkContextFactory.getTestSparkContext();
    final double minorFractionTolerance = 0.025;
    final Genome genome = new Genome(AlleleFractionSimulatedData.TRIVIAL_TARGETS, sample.getCounts());
    final List<SimpleInterval> segments = SegmentUtils.readIntervalsFromSegmentFile(SEGMENTS_FILE);
    final SegmentedGenome segmentedGenome = new SegmentedGenome(segments, genome);
    final int numSamples = 150;
    final int numBurnIn = 50;
    final AlleleFractionModeller modeller = new AlleleFractionModeller(segmentedGenome, allelicPoN);
    modeller.fitMCMC(numSamples, numBurnIn);
    final List<PosteriorSummary> minorAlleleFractionPosteriorSummaries =
            modeller.getMinorAlleleFractionsPosteriorSummaries(CREDIBLE_INTERVAL_ALPHA, ctx);
    final List<Double> minorFractionsResult = minorAlleleFractionPosteriorSummaries.stream().map(PosteriorSummary::getCenter).collect(Collectors.toList());
    final double minorFractionBalanced = 0.5;
    final List<Double> minorFractionsExpected = Arrays.asList(minorFractionBalanced, minorFractionExpectedInMiddleSegment, minorFractionBalanced);
    for (int segment = 0; segment < 3; segment++) {
        Assert.assertEquals(minorFractionsResult.get(segment), minorFractionsExpected.get(segment), minorFractionTolerance);
    }
}
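The expected values used in the data provider of Example 1 and checked here follow directly from the numbers quoted in the javadoc above: the mean of a Gamma(alpha, beta) bias distribution is alpha / beta, and the uncorrected minor-allele fraction at the badly biased sites is 6 / (6 + 9). The small standalone check below merely reproduces that arithmetic; the class name BiasArithmeticCheck is illustrative and it is not part of the test suite.

public final class BiasArithmeticCheck {
    public static void main(final String[] args) {
        // Mean bias of the ordinary sites: Gamma(alpha = 65, beta = 60).
        final double normalMeanBias = 65.0 / 60.0;            // ~1.083
        // Mean bias of the "bad SNP" sites: Gamma(alpha = 9, beta = 6).
        final double badSnpMeanBias = 9.0 / 6.0;              // 1.5
        // With a PoN that is unaware of the extra bias, a balanced het at a bad site
        // looks like ref:alt = 9:6, giving an apparent minor-allele fraction of 0.40.
        final double apparentMinorFraction = 6.0 / (6.0 + 9.0);
        System.out.printf("mean biases: %.3f and %.3f; apparent minor fraction: %.2f%n",
                normalMeanBias, badSnpMeanBias, apparentMinorFraction);
    }
}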