This article collects typical usage examples of the Java class org.apache.kafka.common.Metric. If you are wondering what the Metric class does or how to use it, the curated code examples below may help.
The Metric class belongs to the org.apache.kafka.common package. Fourteen code examples are shown below, sorted by popularity by default.
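The Metric interface itself is small: metricName() identifies the metric (name, group, tags) and value() returns the current measurement as a double (newer clients deprecate value() in favor of metricValue()). As a minimal sketch before the full examples, and assuming a producer configured as in Example 1 below, a single metric can be looked up by name like this ("record-send-rate" is one of the standard producer metrics):
import java.util.Map;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;

// minimal sketch: find one producer metric by name and print its value;
// "producer" is assumed to be a configured KafkaProducer as in Example 1
Map<MetricName, ? extends Metric> metrics = producer.metrics();
for (Map.Entry<MetricName, ? extends Metric> entry : metrics.entrySet()) {
    if ("record-send-rate".equals(entry.getKey().name())) {
        System.out.println(entry.getKey().group() + ":" + entry.getKey().name()
                + " = " + entry.getValue().value());
    }
}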
Example 1: sendData
import org.apache.kafka.common.Metric; // import the required package/class
public void sendData(String data) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092");
    props.put("acks", "all");
    props.put("retries", 0);
    props.put("batch.size", 16384);
    props.put("linger.ms", 1);
    props.put("buffer.memory", 33554432);
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    Producer<String, String> producer = new KafkaProducer<>(props);
    // Producer.metrics() exposes the client's internal metrics as a read-only map
    Map<MetricName, ? extends Metric> metrics = producer.metrics();
    System.out.println(metrics);
    for (int i = 0; i < 100; i++) {
        producer.send(new ProducerRecord<String, String>("video_view", data));
    }
    producer.close();
}
Example 2: testLatencyMetrics
import org.apache.kafka.common.Metric; // import the required package/class
@Test
public void testLatencyMetrics() {
    String groupName = "doesNotMatter";
    String scope = "scope";
    String entity = "entity";
    String operation = "put";
    Map<String, String> tags = new HashMap<>();
    StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(new Metrics(), groupName, tags);
    Sensor sensor1 = streamsMetrics.addLatencyAndThroughputSensor(scope, entity, operation, Sensor.RecordingLevel.DEBUG);
    Map<MetricName, ? extends Metric> metrics = streamsMetrics.metrics();
    // 6 sensor metrics plus the common metric that the Metrics() constructor
    // registers to track the total number of registered metrics
    assertEquals(7, metrics.size());
    streamsMetrics.removeSensor(sensor1);
    metrics = streamsMetrics.metrics();
    assertEquals(1, metrics.size());
}
Example 3: testThroughputMetrics
import org.apache.kafka.common.Metric; // import the required package/class
@Test
public void testThroughputMetrics() {
    String groupName = "doesNotMatter";
    String scope = "scope";
    String entity = "entity";
    String operation = "put";
    Map<String, String> tags = new HashMap<>();
    StreamsMetricsImpl streamsMetrics = new StreamsMetricsImpl(new Metrics(), groupName, tags);
    Sensor sensor1 = streamsMetrics.addThroughputSensor(scope, entity, operation, Sensor.RecordingLevel.DEBUG);
    Map<MetricName, ? extends Metric> metrics = streamsMetrics.metrics();
    // 2 sensor metrics plus the common metric that the Metrics() constructor
    // registers to track the total number of registered metrics
    assertEquals(3, metrics.size());
    streamsMetrics.removeSensor(sensor1);
    metrics = streamsMetrics.metrics();
    assertEquals(1, metrics.size());
}
Example 4: printMetrics
import org.apache.kafka.common.Metric; // import the required package/class
/**
 * Print the metrics in alphabetical order.
 * @param metrics the metrics to be printed
 */
public static void printMetrics(Map<MetricName, ? extends Metric> metrics) {
    if (metrics != null && !metrics.isEmpty()) {
        int maxLengthOfDisplayName = 0;
        // TreeMap sorts its string keys naturally, which is already alphabetical
        TreeMap<String, Double> sortedMetrics = new TreeMap<>();
        for (Metric metric : metrics.values()) {
            MetricName mName = metric.metricName();
            String mergedName = mName.group() + ":" + mName.name() + ":" + mName.tags();
            maxLengthOfDisplayName = Math.max(maxLengthOfDisplayName, mergedName.length());
            sortedMetrics.put(mergedName, metric.value());
        }
        String outputFormat = "%-" + maxLengthOfDisplayName + "s : %.3f";
        System.out.println(String.format("\n%-" + maxLengthOfDisplayName + "s %s", "Metric Name", "Value"));
        for (Map.Entry<String, Double> entry : sortedMetrics.entrySet()) {
            System.out.println(String.format(outputFormat, entry.getKey(), entry.getValue()));
        }
    }
}
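A hypothetical call site, reusing the producer setup from Example 1:
// hypothetical usage: dump all producer metrics, sorted by group:name:tags
Producer<String, String> producer = new KafkaProducer<>(props);
printMetrics(producer.metrics());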
Example 5: metricName
import org.apache.kafka.common.Metric; // import the required package/class
protected String metricName(Metric metric) {
    MetricName name = metric.metricName();
    StringBuilder builder = new StringBuilder();
    builder.append(this.prefix);
    builder.append('.');
    // tag values are appended in tag-key order so the generated name is stable
    for (Map.Entry<String, String> entry : new TreeMap<>(name.tags()).entrySet()) {
        if (!entry.getKey().isEmpty() && !entry.getValue().isEmpty()) {
            builder.append(entry.getValue());
            builder.append('.');
        }
    }
    builder.append(name.group());
    builder.append('.');
    builder.append(name.name());
    return builder.toString();
}
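For example, with a prefix of "kafka" and a producer metric named "record-send-rate" in group "producer-metrics" carrying a client-id tag of "demo", this method would produce "kafka.demo.producer-metrics.record-send-rate".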
Example 6: sendAMetric
import org.apache.kafka.common.Metric; // import the required package/class
private void sendAMetric(String metricName) {
    Metric metric = registry.getMetric(metricName);
    String tag = registry.getTag(metricName);
    // Metric.value() already returns a double, so no boxing round-trip is needed
    double val = metric.value();
    // StatsD cannot represent infinite gauges, so clamp them to zero
    if (Double.isInfinite(val)) {
        val = 0D;
    }
    if (tag != null) {
        statsDClient.gauge(metricName, val, tag);
    } else {
        statsDClient.gauge(metricName, val);
    }
}
Example 7: sendDoubleGauge
import org.apache.kafka.common.Metric; // import the required package/class
@Test
public final void sendDoubleGauge() throws Exception {
    final double value = 10.11;
    // a stub Metric with a fixed value (the two-argument MetricName
    // constructor comes from older Kafka clients)
    Metric metric = new Metric() {
        @Override
        public MetricName metricName() {
            return new MetricName("test-metric", "group");
        }

        @Override
        public double value() {
            return value;
        }
    };
    addMetricAndRunReporter("foo", metric, "bar");
    verify(statsD).gauge(Matchers.eq("foo"), Matchers.eq(value), Matchers.eq("bar"));
}
Example 8: getStat
import org.apache.kafka.common.Metric; // import the required package/class
@Override
public String getStat() {
    Map<MetricName, ? extends Metric> metrics = producer.metrics();
    StringBuilder sb = new StringBuilder();
    // add Kafka producer stats, which are rates
    for (Map.Entry<MetricName, ? extends Metric> e : metrics.entrySet()) {
        sb.append("kafka.").append(e.getKey()).append(": ").append(e.getValue().value()).append('\n');
    }
    // also report our counters
    sb.append("messages-in-queue4sink: ").append(this.queue4Sink.size()).append('\n');
    sb.append("queued-jobs: ").append(this.jobQueue.size()).append('\n');
    sb.append("active-threads: ").append(this.senders.getActiveCount()).append('\n');
    sb.append("received-messages: ").append(this.receivedCount.get()).append('\n');
    sb.append("sent-messages: ").append(this.sentCount.get()).append('\n');
    sb.append("sent-bytes: ").append(this.sentByteCount.get()).append('\n');
    sb.append("dropped-messages: ").append(this.droppedCount.get()).append('\n');
    sb.append("requeued-messages: ").append(this.requeuedCount.get()).append('\n');
    return sb.toString();
}
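Note that appending e.getKey() directly relies on MetricName.toString(), which renders the full name, group, and tags rather than a compact identifier, so each output line is fairly verbose; Example 5's metricName() shows one way to build a flatter display name instead.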
Example 9: metrics
import org.apache.kafka.common.Metric; // import the required package/class
@Override
public Map<MetricName, ? extends Metric> metrics() {
    // delegate to the wrapped client, retrying according to the configured strategy
    return Retries.tryMe(new Callable<Map<MetricName, ? extends Metric>>() {
        @Override
        public Map<MetricName, ? extends Metric> call() throws Exception {
            return inner.metrics();
        }
    }, strategy());
}
Example 10: testNumberDefaultMetrics
import org.apache.kafka.common.Metric; // import the required package/class
@Test
public void testNumberDefaultMetrics() {
    final KafkaStreams streams = createKafkaStreams();
    final Map<MetricName, ? extends Metric> metrics = streams.metrics();
    // all 15 default StreamThread metrics + 1 metric that tracks the number of metrics
    assertEquals(16, metrics.size());
}
Example 11: testPercentiles
import org.apache.kafka.common.Metric; // import the required package/class
@Test
public void testPercentiles() {
    int buckets = 100;
    // a constant-bucket percentile histogram over the value range [0.0, 100.0]
    Percentiles percs = new Percentiles(4 * buckets,
                                        0.0,
                                        100.0,
                                        BucketSizing.CONSTANT,
                                        new Percentile(metrics.metricName("test.p25", "grp1"), 25),
                                        new Percentile(metrics.metricName("test.p50", "grp1"), 50),
                                        new Percentile(metrics.metricName("test.p75", "grp1"), 75));
    MetricConfig config = new MetricConfig().eventWindow(50).samples(2);
    Sensor sensor = metrics.sensor("test", config);
    sensor.add(percs);

    Metric p25 = this.metrics.metrics().get(metrics.metricName("test.p25", "grp1"));
    Metric p50 = this.metrics.metrics().get(metrics.metricName("test.p50", "grp1"));
    Metric p75 = this.metrics.metrics().get(metrics.metricName("test.p75", "grp1"));

    // record two windows' worth of sequential values
    for (int i = 0; i < buckets; i++)
        sensor.record(i);
    assertEquals(25, p25.value(), 1.0);
    assertEquals(50, p50.value(), 1.0);
    assertEquals(75, p75.value(), 1.0);

    for (int i = 0; i < buckets; i++)
        sensor.record(0.0);
    assertEquals(0.0, p25.value(), 1.0);
    assertEquals(0.0, p50.value(), 1.0);
    assertEquals(0.0, p75.value(), 1.0);

    // record two more windows' worth of sequential values
    for (int i = 0; i < buckets; i++)
        sensor.record(i);
    assertEquals(25, p25.value(), 1.0);
    assertEquals(50, p50.value(), 1.0);
    assertEquals(75, p75.value(), 1.0);
}
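The eventWindow(50).samples(2) config is what makes the middle block work: each sample window covers 50 events, so the 100 zero recordings fill both samples and completely displace the earlier 0..99 values, which is why all three percentiles fall back to roughly zero before the final block restores them.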
Example 12: getValue_withKafkaMetricContainer_same
import org.apache.kafka.common.Metric; // import the required package/class
/**
 * The getter returns the same instance that was passed to the constructor.
 */
@Test
public void getValue_withKafkaMetricContainer_same() {
    Metric metric = randomMetric();
    KafkaMetricContainer metricContainer = new KafkaMetricContainer(metric, "test");
    assertThat(metricContainer.getValue()).isSameAs(metric);
}
Example 13: updateMetrics
import org.apache.kafka.common.Metric; // import the required package/class
void updateMetrics(String[] clusters, Map<String, Map<MetricName, ? extends Metric>> metricsMap)
{
    long current = System.currentTimeMillis();
    // throttle sampling to at most once per refresh interval
    if (current - lastMetricSampleTime < metricsRefreshInterval) {
        return;
    }
    lastMetricSampleTime = current;

    if (stats == null) {
        stats = new KafkaConsumerStats[clusters.length];
    }
    for (int i = 0; i < clusters.length; i++) {
        if (stats[i] == null) {
            stats[i] = new KafkaConsumerStats();
            stats[i].cluster = clusters[i];
        }
        Map<MetricName, ? extends Metric> cMetrics = metricsMap.get(clusters[i]);
        if (cMetrics == null || cMetrics.isEmpty()) {
            stats[i].bytesPerSec = 0;
            stats[i].msgsPerSec = 0;
            continue;
        }
        // cache the MetricName keys on first use so later lookups are direct map gets
        if (stats[i].bytePerSecMK == null || stats[i].msgPerSecMK == null) {
            for (MetricName mn : cMetrics.keySet()) {
                if (mn.name().equals("bytes-consumed-rate")) {
                    stats[i].bytePerSecMK = mn;
                } else if (mn.name().equals("records-consumed-rate")) {
                    stats[i].msgPerSecMK = mn;
                }
            }
        }
        stats[i].bytesPerSec = cMetrics.get(stats[i].bytePerSecMK).value();
        stats[i].msgsPerSec = cMetrics.get(stats[i].msgPerSecMK).value();
    }
}
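One caveat worth noting: if a cluster's metrics map is non-empty but contains neither "bytes-consumed-rate" nor "records-consumed-rate", the cached MetricName keys stay null and the final get(...).value() calls would throw a NullPointerException, so callers should only pass maps taken from a live consumer.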
Example 14: getAllConsumerMetrics
import org.apache.kafka.common.Metric; // import the required package/class
public Map<String, Map<MetricName, ? extends Metric>> getAllConsumerMetrics()
{
    Map<String, Map<MetricName, ? extends Metric>> val = new HashMap<>();
    // collect the metrics map of every registered consumer, keyed by consumer name
    for (Map.Entry<String, AbstractKafkaConsumer> e : consumers.entrySet()) {
        val.put(e.getKey(), e.getValue().metrics());
    }
    return val;
}