This article collects typical usage examples of the Java class org.apache.kafka.common.metrics.stats.Min. If you are wondering what the Min class is for, how to use it, or what real code that uses Min looks like, the curated examples below should help.
The Min class belongs to the org.apache.kafka.common.metrics.stats package. Four code examples are shown below, ordered roughly by popularity.
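Before the examples, here is a minimal, self-contained sketch of the pattern they all share: create a Metrics registry, attach a Min (usually alongside Max and Avg) to a Sensor, record values, and read the metric back. The sensor name, group name, and recorded values are made up for illustration; value() matches the Kafka version used in these examples (newer releases prefer metricValue()).

import java.util.concurrent.TimeUnit;

import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Max;
import org.apache.kafka.common.metrics.stats.Min;

public class MinQuickStart {
    public static void main(String[] args) {
        // Metrics is the registry that owns sensors and their metrics.
        Metrics metrics = new Metrics();
        Sensor sensor = metrics.sensor("request-size");

        // Attach Min (and, typically, Max/Avg) to the sensor so every
        // record() call updates all attached stats at once.
        sensor.add(metrics.metricName("request-size-min", "example-group"), new Min());
        sensor.add(metrics.metricName("request-size-max", "example-group"), new Max());
        sensor.add(metrics.metricName("request-size-avg", "example-group"), new Avg());

        sensor.record(120.0);
        sensor.record(80.0);
        sensor.record(200.0);

        // Read the Min metric back from the registry: 80.0 here.
        double min = metrics.metrics()
                .get(metrics.metricName("request-size-min", "example-group"))
                .value();
        System.out.println("min = " + min);

        metrics.close();
    }
}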
Example 1: NamedCacheMetrics
import org.apache.kafka.common.metrics.stats.Min; // import the required package/class
public NamedCacheMetrics(StreamsMetrics metrics) {
    final String scope = "record-cache";
    final String entityName = name;
    final String opName = "hitRatio";
    final String tagKey = "record-cache-id";
    final String tagValue = name;
    this.groupName = "stream-" + scope + "-metrics";
    this.metrics = (StreamsMetricsImpl) metrics;
    this.metricTags = new LinkedHashMap<>();
    this.metricTags.put(tagKey, tagValue);

    hitRatioSensor = this.metrics.registry().sensor(entityName + "-" + opName, Sensor.RecordingLevel.DEBUG);
    hitRatioSensor.add(this.metrics.registry().metricName(entityName + "-" + opName + "-avg", groupName,
            "The current count of " + entityName + " " + opName + " operation.", metricTags), new Avg());
    hitRatioSensor.add(this.metrics.registry().metricName(entityName + "-" + opName + "-min", groupName,
            "The current count of " + entityName + " " + opName + " operation.", metricTags), new Min());
    hitRatioSensor.add(this.metrics.registry().metricName(entityName + "-" + opName + "-max", groupName,
            "The current count of " + entityName + " " + opName + " operation.", metricTags), new Max());
}
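Avg, Min, and Max share one sensor here, so a single record() call at the cache-lookup site updates all three statistics together. The snippet below is a hypothetical call site, not part of the NamedCache source shown above; the hits and misses counters are made up for illustration.

// Hypothetical call site: report the hit ratio after each lookup.
// 'hits' and 'misses' are illustrative counters, not fields shown above.
long hits = 75;
long misses = 25;
hitRatioSensor.record((double) hits / (hits + misses)); // records 0.75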
Example 2: testSampledStatInitialValue
import org.apache.kafka.common.metrics.stats.Min; // import the required package/class
@Test
public void testSampledStatInitialValue() {
    // initialValue from each SampledStat is set as the initialValue on its Sample.
    // The only way to test the initialValue is to infer it by having a SampledStat
    // with expired Stats, because their values are reset to the initial values.
    // Most implementations of combine on SampledStat end up returning the default
    // value, so we can use this. This doesn't work for Percentiles though.
    // This test looks a lot like testOldDataHasNoEffect because it's the same
    // flow that leads to this state.
    Max max = new Max();
    Min min = new Min();
    Avg avg = new Avg();
    Count count = new Count();
    Rate.SampledTotal sampledTotal = new Rate.SampledTotal();

    long windowMs = 100;
    int samples = 2;
    MetricConfig config = new MetricConfig().timeWindow(windowMs, TimeUnit.MILLISECONDS).samples(samples);
    max.record(config, 50, time.milliseconds());
    min.record(config, 50, time.milliseconds());
    avg.record(config, 50, time.milliseconds());
    count.record(config, 50, time.milliseconds());
    sampledTotal.record(config, 50, time.milliseconds());

    time.sleep(samples * windowMs);

    assertEquals(Double.NEGATIVE_INFINITY, max.measure(config, time.milliseconds()), EPS);
    assertEquals(Double.MAX_VALUE, min.measure(config, time.milliseconds()), EPS);
    assertEquals(0.0, avg.measure(config, time.milliseconds()), EPS);
    assertEquals(0, count.measure(config, time.milliseconds()), EPS);
    assertEquals(0.0, sampledTotal.measure(config, time.milliseconds()), EPS);
}
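The takeaway for Min is that its initial value is Double.MAX_VALUE in the Kafka version this test targets (later releases changed these initial values), so once all sample windows expire, measure() falls back to that sentinel rather than 0. The sketch below reproduces the same behavior standalone, using wall-clock timestamps instead of the test's MockTime; the window size and recorded value are chosen for illustration.

import java.util.concurrent.TimeUnit;

import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.stats.Min;

public class MinInitialValueDemo {
    public static void main(String[] args) {
        MetricConfig config = new MetricConfig()
                .timeWindow(100, TimeUnit.MILLISECONDS)
                .samples(2);
        Min min = new Min();

        long now = System.currentTimeMillis();
        min.record(config, 42.0, now);

        // While the sample windows are still live, measure() returns the minimum seen.
        System.out.println(min.measure(config, now));                // 42.0

        // Once every sample window has expired, the samples are reset to the
        // initial value and measure() falls back to Double.MAX_VALUE.
        System.out.println(min.measure(config, now + 2 * 100 + 1));  // 1.7976931348623157E308
    }
}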
Example 3: configureMessageConsumptionByQuerySensor
import org.apache.kafka.common.metrics.stats.Min; // import the required package/class
private Sensor configureMessageConsumptionByQuerySensor(Metrics metrics) {
    Sensor sensor = createSensor(metrics, "message-consumption-by-query");
    sensor.add(metrics.metricName("messages-consumed-max", this.metricGroupName), new Max());
    sensor.add(metrics.metricName("messages-consumed-min", this.metricGroupName), new Min());
    sensor.add(metrics.metricName("messages-consumed-avg", this.metricGroupName), new Avg());
    return sensor;
}
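The returned sensor is the handle the caller records into; each record() call feeds the -max, -min, and -avg metrics registered above. The snippet below is a hypothetical call site under that assumption; the variable names and the recorded count are made up for illustration.

// Hypothetical call site inside the same class.
Sensor messageConsumptionByQuerySensor = configureMessageConsumptionByQuerySensor(metrics);
// ... after consuming a batch of messages for one query:
long messagesConsumedForQuery = 128; // illustrative value
messageConsumptionByQuerySensor.record(messagesConsumedForQuery);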
Example 4: testSimpleStats
import org.apache.kafka.common.metrics.stats.Min; // import the required package/class
@Test
public void testSimpleStats() throws Exception {
    ConstantMeasurable measurable = new ConstantMeasurable();
    metrics.addMetric(metrics.metricName("direct.measurable", "grp1", "The fraction of time an appender waits for space allocation."), measurable);

    Sensor s = metrics.sensor("test.sensor");
    s.add(metrics.metricName("test.avg", "grp1"), new Avg());
    s.add(metrics.metricName("test.max", "grp1"), new Max());
    s.add(metrics.metricName("test.min", "grp1"), new Min());
    s.add(metrics.metricName("test.rate", "grp1"), new Rate(TimeUnit.SECONDS));
    s.add(metrics.metricName("test.occurences", "grp1"), new Rate(TimeUnit.SECONDS, new Count()));
    s.add(metrics.metricName("test.count", "grp1"), new Count());
    s.add(new Percentiles(100, -100, 100, BucketSizing.CONSTANT,
            new Percentile(metrics.metricName("test.median", "grp1"), 50.0),
            new Percentile(metrics.metricName("test.perc99_9", "grp1"), 99.9)));

    Sensor s2 = metrics.sensor("test.sensor2");
    s2.add(metrics.metricName("s2.total", "grp1"), new Total());
    s2.record(5.0);

    int sum = 0;
    int count = 10;
    for (int i = 0; i < count; i++) {
        s.record(i);
        sum += i;
    }

    // prior to any time passing
    double elapsedSecs = (config.timeWindowMs() * (config.samples() - 1)) / 1000.0;
    assertEquals(String.format("Occurrences(0...%d) = %f", count, count / elapsedSecs), count / elapsedSecs,
            metrics.metrics().get(metrics.metricName("test.occurences", "grp1")).value(), EPS);

    // pretend 2 seconds passed...
    long sleepTimeMs = 2;
    time.sleep(sleepTimeMs * 1000);
    elapsedSecs += sleepTimeMs;

    assertEquals("s2 reflects the constant value", 5.0, metrics.metrics().get(metrics.metricName("s2.total", "grp1")).value(), EPS);
    assertEquals("Avg(0...9) = 4.5", 4.5, metrics.metrics().get(metrics.metricName("test.avg", "grp1")).value(), EPS);
    assertEquals("Max(0...9) = 9", count - 1, metrics.metrics().get(metrics.metricName("test.max", "grp1")).value(), EPS);
    assertEquals("Min(0...9) = 0", 0.0, metrics.metrics().get(metrics.metricName("test.min", "grp1")).value(), EPS);
    assertEquals("Rate(0...9) = 1.40625",
            sum / elapsedSecs, metrics.metrics().get(metrics.metricName("test.rate", "grp1")).value(), EPS);
    assertEquals(String.format("Occurrences(0...%d) = %f", count, count / elapsedSecs),
            count / elapsedSecs,
            metrics.metrics().get(metrics.metricName("test.occurences", "grp1")).value(), EPS);
    assertEquals("Count(0...9) = 10",
            (double) count, metrics.metrics().get(metrics.metricName("test.count", "grp1")).value(), EPS);
}
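The test also registers Percentiles on the same sensor but never reads them back. A metric registered through a compound stat is looked up the same way as the simple stats; a small follow-up sketch, again using value() as in the test above:

// Hypothetical follow-up: read the median of the ten recorded values (0..9).
double median = metrics.metrics()
        .get(metrics.metricName("test.median", "grp1"))
        .value();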